diff --git a/.travis.yml b/.travis.yml
index ab421366db6a..c023116917a3 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,9 +10,6 @@ before_install:
 install: mvn install -DskipTests=true -Dgpg.skip=true
 script:
 - utilities/verify.sh
-branches:
-  only:
-  - master
 after_success:
 - utilities/after_success.sh
 env:
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 167bf18e5082..bf87d471e34a 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -15,6 +15,22 @@ Using maven for build/test
 After you cloned the repository use Maven for building and running the tests.
 Maven 3.0+ is required.
 
+When downloading the source, we recommend you obtain service account credentials.
+These credentials will allow you to run integration tests using `mvn verify` on the command line.
+Follow step 2 of the [authentication instructions](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) to generate and use JSON service account credentials.
+
+It's also important to test that changes don't break compatibility with App Engine and Compute Engine, or when running elsewhere.
+To run tests on different platforms, try deploying the apps available in the [gcloud-java-examples](https://github.com/GoogleCloudPlatform/gcloud-java-examples) repository.
+End-to-end tests should ensure that gcloud-java works when running on the
+
+* App Engine production environment (see the docs for [uploading your app to production App Engine](https://cloud.google.com/appengine/docs/java/tools/maven#uploading_your_app_to_production_app_engine))
+* App Engine development server (see the docs for [testing your app with the development server](https://cloud.google.com/appengine/docs/java/tools/maven#testing_your_app_with_the_development_server))
+* Compute Engine (see the [Getting Started Guide](https://cloud.google.com/compute/docs/quickstart), and be sure to [enable the appropriate APIs](https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication#on-google-compute-engine))
+* Your desktop (using `mvn exec:java`, for example)
+
+When changes are made to authentication and project ID-related code, authentication and project ID inference should be tested using all relevant methods detailed in the [authentication docs](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and [project ID docs](https://github.com/GoogleCloudPlatform/gcloud-java#specifying-a-project-id).
+
+Known issue: If you have installed the Google Cloud SDK, be sure to log in (using `gcloud auth login`) before running tests. Though the Datastore tests use a local Datastore emulator that doesn't require authentication, they will not run if you have the Google Cloud SDK installed but aren't authenticated.
 
 Adding Features
 ---------------
diff --git a/README.md b/README.md
index 2887f5a1a9a1..df68cd18005d 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,9 @@ Java idiomatic client for [Google Cloud Platform][cloud-platform] services.
 This client supports the following Google Cloud Platform services:
 
+- [Google Cloud BigQuery] (#google-cloud-bigquery-alpha) (Alpha)
 - [Google Cloud Datastore] (#google-cloud-datastore)
+- [Google Cloud Resource Manager] (#google-cloud-resource-manager-alpha) (Alpha)
 - [Google Cloud Storage] (#google-cloud-storage)
 
 > Note: This client is a work-in-progress, and may occasionally
@@ -20,32 +22,142 @@ This client supports the following Google Cloud Platform services:
 
 Quickstart
 ----------
-Add this to your pom.xml file
+If you are using Maven, add this to your pom.xml file
 ```xml
 <dependency>
   <groupId>com.google.gcloud</groupId>
   <artifactId>gcloud-java</artifactId>
-  <version>0.0.10</version>
+  <version>0.1.3</version>
 </dependency>
 ```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.gcloud:gcloud-java:0.1.3'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.3"
+```
 
 Example Applications
 --------------------
 
+- [`BigQueryExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java) - A simple command line interface providing some of Cloud BigQuery's functionality
+  - Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/BigQueryExample.html).
+- [`Bookshelf`](https://github.com/GoogleCloudPlatform/getting-started-java/tree/master/bookshelf) - An App Engine app that manages a virtual bookshelf.
+  - This app uses `gcloud-java` to interface with Cloud Datastore and Cloud Storage. It also uses Cloud SQL, another Google Cloud Platform service.
 - [`DatastoreExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java) - A simple command line interface for the Cloud Datastore
   - Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/DatastoreExample.html).
+- [`ResourceManagerExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java) - A simple command line interface providing some of Cloud Resource Manager's functionality
+  - Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/ResourceManagerExample.html).
+- [`SparkDemo`](https://github.com/GoogleCloudPlatform/java-docs-samples/blob/master/managedvms/sparkjava) - An example of using gcloud-java-datastore from within the SparkJava and App Engine Managed VM frameworks.
+  - Read about how it works on the example's [README page](https://github.com/GoogleCloudPlatform/java-docs-samples/tree/master/managedvms/sparkjava#how-does-it-work).
 - [`StorageExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java) - A simple command line interface providing some of Cloud Storage's functionality
   - Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/StorageExample.html).
 
+Specifying a Project ID
+-----------------------
+
+Most `gcloud-java` libraries require a project ID.
+There are multiple ways to specify this project ID.
+
+1. When using `gcloud-java` libraries from within Compute/App Engine, there's no need to specify a project ID. It is automatically inferred from the production environment.
+2. When using `gcloud-java` elsewhere, you can do one of the following:
+  * Supply the project ID when building the service options. For example, to use Datastore from a project with ID "PROJECT_ID", you can write:
+
+  ```java
+  Datastore datastore = DatastoreOptions.builder().projectId("PROJECT_ID").build().service();
+  ```
+  * Specify the environment variable `GCLOUD_PROJECT` to be your desired project ID.
+  * Set the project ID using the [Google Cloud SDK](https://cloud.google.com/sdk/?hl=en). To use the SDK, [download the SDK](https://cloud.google.com/sdk/?hl=en) if you haven't already, and set the project ID from the command line. For example:
+
+  ```
+  gcloud config set project PROJECT_ID
+  ```
+
+`gcloud-java` determines the project ID from the following sources in the listed order, stopping once it finds a value:
+
+1. Project ID supplied when building the service options
+2. Project ID specified by the environment variable `GCLOUD_PROJECT`
+3. App Engine project ID
+4. Google Cloud SDK project ID
+5. Compute Engine project ID
+
 Authentication
 --------------
 
-There are multiple ways to authenticate to use Google Cloud services.
+First, ensure that the necessary Google Cloud APIs are enabled for your project. To do this, follow the instructions on the [authentication document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/authentication/readme.md#authentication) shared by all the gcloud language libraries.
+
+Next, choose a method for authenticating API requests from within your project:
 
 1. When using `gcloud-java` libraries from within Compute/App Engine, no additional authentication steps are necessary.
 2. When using `gcloud-java` libraries elsewhere, there are two options:
-  * [Generate a JSON service account key](https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts). Supply a path to the downloaded JSON credentials file when building the options supplied to datastore/storage constructor.
-  * If running locally for development/testing, you can use use [Google Cloud SDK](https://cloud.google.com/sdk/?hl=en). To use the SDK authentication, [download the SDK](https://cloud.google.com/sdk/?hl=en) if you haven't already. Then login using the SDK (`gcloud auth login` in command line), and set your current project using `gcloud config set project PROJECT_ID`.
+  * [Generate a JSON service account key](https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts). After downloading that key, you must do one of the following (a sketch showing the credentials being picked up automatically follows this list):
+    * Define the environment variable GOOGLE_APPLICATION_CREDENTIALS to be the location of the key. For example:
+
+      ```bash
+      export GOOGLE_APPLICATION_CREDENTIALS=/path/to/my/key.json
+      ```
+    * Supply the JSON credentials file when building the service options. For example, this Storage object has the necessary permissions to interact with your Google Cloud Storage data:
+
+      ```java
+      Storage storage = StorageOptions.builder()
+          .authCredentials(AuthCredentials.createForJson(new FileInputStream("/path/to/my/key.json")))
+          .build()
+          .service();
+      ```
+  * If running locally for development/testing, you can use the Google Cloud SDK. Download the SDK if you haven't already, then log in using the SDK (`gcloud auth login` in command line). Be sure to set your project ID as described above.
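+
+For example, once `GOOGLE_APPLICATION_CREDENTIALS` points at your key (or when running on Compute/App Engine), no explicit credentials are needed when building a service object. A minimal sketch using the Storage service:
+
+```java
+import com.google.gcloud.storage.Storage;
+import com.google.gcloud.storage.StorageOptions;
+
+// Credentials are inferred automatically from the environment.
+Storage storage = StorageOptions.defaultInstance().service();
+```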
+
+`gcloud-java` looks for credentials in the following order, stopping once it finds credentials:
+
+1. Credentials supplied when building the service options
+2. App Engine credentials
+3. Key file pointed to by the GOOGLE_APPLICATION_CREDENTIALS environment variable
+4. Google Cloud SDK credentials
+5. Compute Engine credentials
+
+Google Cloud BigQuery (Alpha)
+----------------------
+
+- [API Documentation][bigquery-api]
+- [Official Documentation][cloud-bigquery-docs]
+
+#### Preview
+
+Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you
+must [supply credentials](#authentication) and a project ID if running this snippet elsewhere.
+
+```java
+import com.google.gcloud.bigquery.BaseTableInfo;
+import com.google.gcloud.bigquery.BigQuery;
+import com.google.gcloud.bigquery.BigQueryOptions;
+import com.google.gcloud.bigquery.Field;
+import com.google.gcloud.bigquery.JobInfo;
+import com.google.gcloud.bigquery.JobStatus;
+import com.google.gcloud.bigquery.LoadJobConfiguration;
+import com.google.gcloud.bigquery.Schema;
+import com.google.gcloud.bigquery.TableId;
+import com.google.gcloud.bigquery.TableInfo;
+
+BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+TableId tableId = TableId.of("dataset", "table");
+BaseTableInfo info = bigquery.getTable(tableId);
+if (info == null) {
+  System.out.println("Creating table " + tableId);
+  Field integerField = Field.of("fieldName", Field.Type.integer());
+  bigquery.create(TableInfo.of(tableId, Schema.of(integerField)));
+} else {
+  System.out.println("Loading data into table " + tableId);
+  LoadJobConfiguration configuration = LoadJobConfiguration.of(tableId, "gs://bucket/path");
+  JobInfo loadJob = JobInfo.of(configuration);
+  loadJob = bigquery.create(loadJob);
+  while (loadJob.status().state() != JobStatus.State.DONE) {
+    Thread.sleep(1000L);
+    loadJob = bigquery.getJob(loadJob.jobId());
+  }
+  if (loadJob.status().error() != null) {
+    System.out.println("Job completed with errors");
+  } else {
+    System.out.println("Job succeeded");
+  }
+}
+```
 
 Google Cloud Datastore
 ----------------------
@@ -61,14 +173,13 @@ Here is a code snippet showing a simple usage example from within Compute/App En
 
 ```java
 import com.google.gcloud.datastore.Datastore;
-import com.google.gcloud.datastore.DatastoreFactory;
 import com.google.gcloud.datastore.DatastoreOptions;
 import com.google.gcloud.datastore.DateTime;
 import com.google.gcloud.datastore.Entity;
 import com.google.gcloud.datastore.Key;
 import com.google.gcloud.datastore.KeyFactory;
 
-Datastore datastore = DatastoreFactory.instance().get(DatastoreOptions.getDefaultInstance());
+Datastore datastore = DatastoreOptions.defaultInstance().service();
 KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND);
 Key key = keyFactory.newKey(keyName);
 Entity entity = datastore.get(key);
@@ -88,6 +199,39 @@ if (entity == null) {
 }
 ```
 
+Google Cloud Resource Manager (Alpha)
+----------------------
+
+- [API Documentation][resourcemanager-api]
+- [Official Documentation][cloud-resourcemanager-docs]
+
+#### Preview
+
+Here is a code snippet showing a simple usage example. Note that you must supply Google Cloud SDK
+credentials for this service, not other forms of authentication listed in the [Authentication section](#authentication).
+
+```java
+import com.google.gcloud.resourcemanager.Project;
+import com.google.gcloud.resourcemanager.ResourceManager;
+import com.google.gcloud.resourcemanager.ResourceManagerOptions;
+
+import java.util.Iterator;
+
+ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
+Project myProject = resourceManager.get("some-project-id"); // Use an existing project's ID
+Project newProject = myProject.toBuilder()
+    .addLabel("launch-status", "in-development")
+    .build()
+    .replace();
+System.out.println("Updated the labels of project " + newProject.projectId()
+    + " to be " + newProject.labels());
+// List all the projects you have permission to view.
+Iterator<Project> projectIterator = resourceManager.list().iterateAll();
+System.out.println("Projects I can view:");
+while (projectIterator.hasNext()) {
+  System.out.println(projectIterator.next().projectId());
+}
+```
+
 Google Cloud Storage
 ----------------------
@@ -106,16 +250,14 @@ import static java.nio.charset.StandardCharsets.UTF_8;
 
 import com.google.gcloud.storage.Blob;
 import com.google.gcloud.storage.BlobId;
 import com.google.gcloud.storage.Storage;
-import com.google.gcloud.storage.StorageFactory;
 import com.google.gcloud.storage.StorageOptions;
 
 import java.nio.ByteBuffer;
 import java.nio.channels.WritableByteChannel;
 
-StorageOptions options = StorageOptions.builder().projectId("project").build();
-Storage storage = StorageFactory.instance().get(options);
+Storage storage = StorageOptions.defaultInstance().service();
 BlobId blobId = BlobId.of("bucket", "blob_name");
-Blob blob = Blob.load(storage, blobId);
+Blob blob = Blob.get(storage, blobId);
 if (blob == null) {
   BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build();
   storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8));
@@ -129,6 +271,11 @@ if (blob == null) {
 }
 ```
 
+Troubleshooting
+---------------
+
+To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
+
 Java Versions
 -------------
 
@@ -155,7 +302,7 @@ Contributing
 
 Contributions to this library are always welcome and highly encouraged.
 
-See [CONTRIBUTING] for more information on how to get started.
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
 
 Please note that this project is released with a Contributor Code of Conduct. By participating in
 this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more
 information.
@@ -166,7 +313,7 @@ Apache 2.0 - See [LICENSE] for more information.
 
 [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md
-[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md
+[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct
 [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE
 [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md
 [cloud-platform]: https://cloud.google.com/
@@ -183,3 +330,10 @@ Apache 2.0 - See [LICENSE] for more information.
 [cloud-storage-create-bucket]: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets
 [cloud-storage-activation]: https://cloud.google.com/storage/docs/signup
 [storage-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/storage/package-summary.html
+
+[resourcemanager-api]:http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html
+[cloud-resourcemanager-docs]:https://cloud.google.com/resource-manager/
+
+[cloud-bigquery]: https://cloud.google.com/bigquery/
+[cloud-bigquery-docs]: https://cloud.google.com/bigquery/docs/overview
+[bigquery-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html
diff --git a/RELEASING.md b/RELEASING.md
index 419f723fe328..5e2d6202062e 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -10,13 +10,23 @@ This script takes an optional argument denoting the new version. By default, if
 
 2. Create a PR to update the pom.xml version. The PR should look something like [#225](https://github.com/GoogleCloudPlatform/gcloud-java/pull/225). After this PR is merged into GoogleCloudPlatform/gcloud-java, Travis CI will push a new website to GoogleCloudPlatform/gh-pages, push a new artifact to the Maven Central Repository, and update versions in the README files.
 
-3. Create a release on Github manually.
-Go to the [releases page](https://github.com/GoogleCloudPlatform/gcloud-java/releases) and click "Draft a new release." Use `vX.Y.Z` as the "Tag Version" and `X.Y.Z` as the "Release Title", where `X.Y.Z` is the release version as listed in the `pom.xml` files.
+3. Before moving on, verify that the artifacts have successfully been pushed to the Maven Central Repository. Open Travis CI, click the ["Build History" tab](https://travis-ci.org/GoogleCloudPlatform/gcloud-java/builds), and open the second build's logs for Step 2's PR. Be sure that you are not opening the "Pull Request" build logs. When the build finishes, scroll to the end of the log and verify that the artifacts were successfully staged and deployed. You can also search for `gcloud-java` on the [Sonatype website](https://oss.sonatype.org/#nexus-search;quick~gcloud-java) and check the latest version number. If the deployment didn't succeed because of a flaky test, rerun the build.
 
-4. Run `utilities/update_pom_version.sh` again (to include "-SNAPSHOT" in the project version).
+4. Publish a release on Github manually.
+Go to the [releases page](https://github.com/GoogleCloudPlatform/gcloud-java/releases) and open the appropriate release draft. Make sure the "Tag Version" is `vX.Y.Z` and the "Release Title" is `X.Y.Z`, where `X.Y.Z` is the release version as listed in the `pom.xml` files. The draft should already have all changes that impact users since the previous release. To double check this, you can use the `git log` command and look through the merged master branch pull requests. Here is an example of the log command to get non-merge commits between v0.0.12 and v0.1.0:
+
+  ```
+  git --no-pager log v0.0.12..v0.1.0 --pretty=oneline --abbrev-commit --no-merges
+  ```
+
+  Ensure that the format is consistent with previous releases (for an example, see the [0.1.0 release](https://github.com/GoogleCloudPlatform/gcloud-java/releases/tag/v0.1.0)). After adding any missing updates and reformatting as necessary, publish the draft. Finally, create a new draft for the next release.
+
+5. Run `utilities/update_pom_version.sh` again (to include "-SNAPSHOT" in the project version).
 As mentioned before, there is an optional version argument. By default, the script will update the version from "X.Y.Z" to "X.Y.Z+1-SNAPSHOT". Suppose a different version is desired, for example X+1.0.0-SNAPSHOT. Then the appropriate command to run would be `utilities/update_pom_version.sh X+1.0.0-SNAPSHOT`.
 
-5. Create and merge in another PR to reflect the updated project version. For an example of what this PR should look like, see [#227](https://github.com/GoogleCloudPlatform/gcloud-java/pull/227).
+6. Create and merge in another PR to reflect the updated project version. For an example of what this PR should look like, see [#227](https://github.com/GoogleCloudPlatform/gcloud-java/pull/227).
+
+7. Be sure to update App Engine documentation and [java-docs-samples](https://github.com/GoogleCloudPlatform/java-docs-samples) code as necessary. See directions [here](https://docs.google.com/a/google.com/document/d/1SS3xNn2v0qW7EadGUPBUAPIQAH5VY6WSFmT17ZjjUVE/edit?usp=sharing).
 
 ### To push a snapshot version
diff --git a/TESTING.md b/TESTING.md
index 158d71be2e91..3ad181310b17 100644
--- a/TESTING.md
+++ b/TESTING.md
@@ -1,6 +1,11 @@
 ## gcloud-java tools for testing
 
-This library provides tools to help write tests for code that uses gcloud-java services.
+This library provides tools to help write tests for code that uses the following gcloud-java services:
+
+- [Datastore] (#testing-code-that-uses-datastore)
+- [Storage] (#testing-code-that-uses-storage)
+- [Resource Manager] (#testing-code-that-uses-resource-manager)
+- [BigQuery] (#testing-code-that-uses-bigquery)
 
 ### Testing code that uses Datastore
 
@@ -18,7 +23,7 @@ You can test against a temporary local datastore by following these steps:
       .projectId(PROJECT_ID)
       .host("http://localhost:8080")
       .build();
-  Datastore localDatastore = DatastoreFactory.instance().get(options);
+  Datastore localDatastore = options.service();
   ```
 
 3. Run your tests.
 
@@ -35,7 +40,7 @@ You can test against a remote datastore emulator as well. To do this, set the `
       .projectId(PROJECT_ID)
       .host("http://<hostname>:<port>")
       .build();
-  Datastore localDatastore = DatastoreFactory.instance().get(options);
+  Datastore localDatastore = options.service();
   ```
 
 Note that the remote datastore must be running before your tests are run.
 
@@ -51,8 +56,9 @@ Currently, there isn't an emulator for Google Cloud Storage, so an alternative i
 
 3. Create a `RemoteGcsHelper` object using your project ID and JSON key.
 Here is an example that uses the `RemoteGcsHelper` to create a bucket.
   ```java
-  RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(PROJECT_ID, "/path/to/my/JSON/key.json");
-  Storage storage = StorageFactory.instance().get(gcsHelper.options());
+  RemoteGcsHelper gcsHelper =
+      RemoteGcsHelper.create(PROJECT_ID, new FileInputStream("/path/to/my/JSON/key.json"));
+  Storage storage = gcsHelper.options().service();
   String bucket = RemoteGcsHelper.generateBucketName();
   storage.create(BucketInfo.of(bucket));
   ```
 
@@ -65,5 +71,67 @@ Here is an example that clears the bucket created in Step 3 with a timeout of 5
   RemoteGcsHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS);
   ```
 
+### Testing code that uses Resource Manager
+
+#### On your machine
+
+You can test against a temporary local Resource Manager by following these steps:
+
+1. Before running your testing code, start the Resource Manager emulator `LocalResourceManagerHelper`.
+This can be done as follows:
+
+  ```java
+  import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper;
+
+  LocalResourceManagerHelper helper = LocalResourceManagerHelper.create();
+  helper.start();
+  ```
+
+  This will spawn a server thread that listens to `localhost` at an ephemeral port for Resource Manager requests.
+
+2. In your program, create and use a Resource Manager service object whose host is set to `localhost` at the appropriate port. For example:
+
+  ```java
+  ResourceManager resourceManager = LocalResourceManagerHelper.options().service();
+  ```
+
+3. Run your tests.
+
+4. Stop the Resource Manager emulator.
+
+  ```java
+  helper.stop();
+  ```
+
+  This method will block until the server thread has been terminated.
+
+### Testing code that uses BigQuery
+
+Currently, there isn't an emulator for Google BigQuery, so an alternative is to create a test
+project. `RemoteBigQueryHelper` contains convenience methods to make setting up and cleaning up the
+test project easier. To use this class, follow the steps below:
+
+1. Create a test Google Cloud project.
+
+2. Download a [JSON service account credentials file][create-service-account] from the Google
+Developer's Console.
+
+3. Create a `RemoteBigQueryHelper` object using your project ID and JSON key.
+Here is an example that uses the `RemoteBigQueryHelper` to create a dataset.
+  ```java
+  RemoteBigQueryHelper bigqueryHelper =
+      RemoteBigQueryHelper.create(PROJECT_ID, new FileInputStream("/path/to/my/JSON/key.json"));
+  BigQuery bigquery = bigqueryHelper.options().service();
+  String dataset = RemoteBigQueryHelper.generateDatasetName();
+  bigquery.create(DatasetInfo.builder(dataset).build());
+  ```
+
+4. Run your tests.
+
+5. Clean up the test project by using `forceDelete` to clear any datasets used.
+Here is an example that clears the dataset created in Step 3.
+  ```java
+  RemoteBigQueryHelper.forceDelete(bigquery, dataset);
+  ```
 
 [cloud-platform-storage-authentication]:https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts
+[create-service-account]:https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount
\ No newline at end of file
diff --git a/gcloud-java-bigquery/README.md b/gcloud-java-bigquery/README.md
new file mode 100644
index 000000000000..eb347dfa0063
--- /dev/null
+++ b/gcloud-java-bigquery/README.md
@@ -0,0 +1,330 @@
+Google Cloud Java Client for BigQuery (Alpha)
+====================================
+
+Java idiomatic client for [Google Cloud BigQuery] (https://cloud.google.com/bigquery).
+
+[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java)
+[![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master)
+[![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-bigquery.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-bigquery.svg)
+
+- [Homepage] (https://googlecloudplatform.github.io/gcloud-java/)
+- [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html)
+
+> Note: This client is a work-in-progress, and may occasionally
+> make backwards-incompatible changes.
+
+Quickstart
+----------
+If you are using Maven, add this to your pom.xml file
+```xml
+<dependency>
+  <groupId>com.google.gcloud</groupId>
+  <artifactId>gcloud-java-bigquery</artifactId>
+  <version>0.1.3</version>
+</dependency>
+```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.gcloud:gcloud-java-bigquery:0.1.3'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.gcloud" % "gcloud-java-bigquery" % "0.1.3"
+```
+
+Example Application
+-------------------
+- [`BigQueryExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java) - A simple command line interface providing some of Cloud BigQuery's functionality.
+Read more about using this application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/BigQueryExample.html).
+
+Authentication
+--------------
+
+See the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) section in the base directory's README.
+
+About Google Cloud BigQuery
+--------------------------
+
+[Google Cloud BigQuery][cloud-bigquery] is a fully managed, NoOps, low cost data analytics service.
+Data can be streamed into BigQuery at millions of rows per second to enable real-time analysis.
+With BigQuery you can easily deploy Petabyte-scale Databases.
+
+Be sure to activate the Google Cloud BigQuery API on the Developer's Console to use BigQuery from
+your project.
+
+See the ``gcloud-java`` API [bigquery documentation][bigquery-api] to learn how to interact
+with Google Cloud BigQuery using this Client Library.
+
+Getting Started
+---------------
+#### Prerequisites
+For this tutorial, you will need a
+[Google Developers Console](https://console.developers.google.com/) project with the BigQuery API
+enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to
+use Google Cloud BigQuery.
+[Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your
+project set up. You will also need to set up the local development environment by [installing the
+Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line:
+`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`.
+
+#### Installation and setup
+You'll need to obtain the `gcloud-java-bigquery` library. See the [Quickstart](#quickstart) section
+to add `gcloud-java-bigquery` as a dependency in your code.
+
+#### Creating an authorized service object
+To make authenticated requests to Google Cloud BigQuery, you must create a service object with
+credentials. You can then make API calls by calling methods on the BigQuery service object. The
+simplest way to authenticate is to use
+[Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials).
+These credentials are automatically inferred from your environment, so you only need the following
+code to create your service object:
+
+```java
+import com.google.gcloud.bigquery.BigQuery;
+import com.google.gcloud.bigquery.BigQueryOptions;
+
+BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+```
+
+For other authentication options, see the
+[Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page.
+
+#### Creating a dataset
+With BigQuery you can create datasets. A dataset is a grouping mechanism that holds zero or more
+tables. Add the following import at the top of your file:
+
+```java
+import com.google.gcloud.bigquery.DatasetInfo;
+```
+Then, to create the dataset, use the following code:
+
+```java
+// Create a dataset
+String datasetId = "my_dataset_id";
+bigquery.create(DatasetInfo.builder(datasetId).build());
+```
+
+#### Creating a table
+With BigQuery you can create different types of tables: normal tables with an associated schema,
+external tables backed by data stored on [Google Cloud Storage][cloud-storage] and view tables that
+are created from a BigQuery SQL query. In this code snippet we show how to create a normal table
+with only one string field. Add the following imports at the top of your file:
+
+```java
+import com.google.gcloud.bigquery.BaseTableInfo;
+import com.google.gcloud.bigquery.Field;
+import com.google.gcloud.bigquery.Schema;
+import com.google.gcloud.bigquery.TableId;
+import com.google.gcloud.bigquery.TableInfo;
+```
+Then add the following code to create the table:
+
+```java
+TableId tableId = TableId.of(datasetId, "my_table_id");
+// Table field definition
+Field stringField = Field.of("StringField", Field.Type.string());
+// Table schema definition
+Schema schema = Schema.of(stringField);
+// Create a table
+TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema));
+```
+
+#### Loading data into a table
+BigQuery provides several ways to load data into a table: streaming rows or loading data from a
+Google Cloud Storage file. In this code snippet we show how to stream rows into a table.
+Add the following imports at the top of your file:
+
+```java
+import com.google.gcloud.bigquery.InsertAllRequest;
+import com.google.gcloud.bigquery.InsertAllResponse;
+
+import java.util.HashMap;
+import java.util.Map;
+```
+Then add the following code to insert data:
+
+```java
+Map<String, Object> firstRow = new HashMap<>();
+Map<String, Object> secondRow = new HashMap<>();
+firstRow.put("StringField", "value1");
+secondRow.put("StringField", "value2");
+// Create an insert request
+InsertAllRequest insertRequest = InsertAllRequest.builder(tableId)
+    .addRow(firstRow)
+    .addRow(secondRow)
+    .build();
+// Insert rows
+InsertAllResponse insertResponse = bigquery.insertAll(insertRequest);
+// Check if errors occurred
+if (insertResponse.hasErrors()) {
+  System.out.println("Errors occurred while inserting rows");
+}
+```
+
+#### Querying data
+BigQuery enables querying data by running queries and waiting for the result. Queries can be run
+directly or through a Query Job. In this code snippet we show how to run a query directly and wait
+for the result. Add the following imports at the top of your file:
+
+```java
+import com.google.gcloud.bigquery.FieldValue;
+import com.google.gcloud.bigquery.QueryRequest;
+import com.google.gcloud.bigquery.QueryResponse;
+
+import java.util.Iterator;
+import java.util.List;
+```
+Then add the following code to run the query and wait for the result:
+
+```java
+// Create a query request
+QueryRequest queryRequest =
+    QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id")
+        .maxWaitTime(60000L)
+        .maxResults(1000L)
+        .build();
+// Request query to be executed and wait for results
+QueryResponse queryResponse = bigquery.query(queryRequest);
+while (!queryResponse.jobComplete()) {
+  Thread.sleep(1000L);
+  queryResponse = bigquery.getQueryResults(queryResponse.jobId());
+}
+// Read rows
+Iterator<List<FieldValue>> rowIterator = queryResponse.result().iterateAll();
+System.out.println("Table rows:");
+while (rowIterator.hasNext()) {
+  System.out.println(rowIterator.next());
+}
+```
+#### Complete source code
+
+Here we put together all the code shown above into one program. This program assumes that you are
+running on Compute Engine or from your own desktop. To run this example on App Engine, simply move
+the code from the main method to your application's servlet class and change the print statements to
+display on your webpage.
+
+```java
+import com.google.gcloud.bigquery.BaseTableInfo;
+import com.google.gcloud.bigquery.BigQuery;
+import com.google.gcloud.bigquery.BigQueryOptions;
+import com.google.gcloud.bigquery.DatasetInfo;
+import com.google.gcloud.bigquery.Field;
+import com.google.gcloud.bigquery.FieldValue;
+import com.google.gcloud.bigquery.InsertAllRequest;
+import com.google.gcloud.bigquery.InsertAllResponse;
+import com.google.gcloud.bigquery.QueryRequest;
+import com.google.gcloud.bigquery.QueryResponse;
+import com.google.gcloud.bigquery.Schema;
+import com.google.gcloud.bigquery.TableId;
+import com.google.gcloud.bigquery.TableInfo;
+
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+public class GcloudBigQueryExample {
+
+  public static void main(String[] args) throws InterruptedException {
+
+    // Create a service instance
+    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+
+    // Create a dataset
+    String datasetId = "my_dataset_id";
+    bigquery.create(DatasetInfo.builder(datasetId).build());
+
+    TableId tableId = TableId.of(datasetId, "my_table_id");
+    // Table field definition
+    Field stringField = Field.of("StringField", Field.Type.string());
+    // Table schema definition
+    Schema schema = Schema.of(stringField);
+    // Create a table
+    TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema));
+
+    // Define rows to insert
+    Map<String, Object> firstRow = new HashMap<>();
+    Map<String, Object> secondRow = new HashMap<>();
+    firstRow.put("StringField", "value1");
+    secondRow.put("StringField", "value2");
+    // Create an insert request
+    InsertAllRequest insertRequest = InsertAllRequest.builder(tableId)
+        .addRow(firstRow)
+        .addRow(secondRow)
+        .build();
+    // Insert rows
+    InsertAllResponse insertResponse = bigquery.insertAll(insertRequest);
+    // Check if errors occurred
+    if (insertResponse.hasErrors()) {
+      System.out.println("Errors occurred while inserting rows");
+    }
+
+    // Create a query request
+    QueryRequest queryRequest =
+        QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id")
+            .maxWaitTime(60000L)
+            .maxResults(1000L)
+            .build();
+    // Request query to be executed and wait for results
+    QueryResponse queryResponse = bigquery.query(queryRequest);
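+    // query() may return before the query job completes, so poll
+    // getQueryResults() until jobComplete() is true before reading rows.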
+    while (!queryResponse.jobComplete()) {
+      Thread.sleep(1000L);
+      queryResponse = bigquery.getQueryResults(queryResponse.jobId());
+    }
+    // Read rows
+    Iterator<List<FieldValue>> rowIterator = queryResponse.result().iterateAll();
+    System.out.println("Table rows:");
+    while (rowIterator.hasNext()) {
+      System.out.println(rowIterator.next());
+    }
+  }
+}
+```
+
+Troubleshooting
+---------------
+
+To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
+
+Java Versions
+-------------
+
+Java 7 or above is required for using this client.
+
+Testing
+-------
+
+This library provides tools to help write tests for code that uses Cloud BigQuery.
+
+See [TESTING] to read more about testing.
+
+Versioning
+----------
+
+This library follows [Semantic Versioning] (http://semver.org/).
+
+It is currently in major version zero (``0.y.z``), which means that anything
+may change at any time and the public API should not be considered
+stable.
+
+Contributing
+------------
+
+Contributions to this library are always welcome and highly encouraged.
+
+See [CONTRIBUTING] for more information on how to get started.
+
+License
+-------
+
+Apache 2.0 - See [LICENSE] for more information.
+
+
+[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md
+[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE
+[TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-bigquery
+[cloud-platform]: https://cloud.google.com/
+
+[cloud-bigquery]: https://cloud.google.com/bigquery/
+[cloud-storage]: https://cloud.google.com/storage/
+[bigquery-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html
diff --git a/gcloud-java-bigquery/pom.xml b/gcloud-java-bigquery/pom.xml
new file mode 100644
index 000000000000..a5d711abf610
--- /dev/null
+++ b/gcloud-java-bigquery/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>com.google.gcloud</groupId>
+  <artifactId>gcloud-java-bigquery</artifactId>
+  <packaging>jar</packaging>
+  <name>GCloud Java bigquery</name>
+  <description>
+    Java idiomatic client for Google Cloud BigQuery.
+  </description>
+  <parent>
+    <groupId>com.google.gcloud</groupId>
+    <artifactId>gcloud-java-pom</artifactId>
+    <version>0.1.4-SNAPSHOT</version>
+  </parent>
+  <properties>
+    <site.installationModule>gcloud-java-bigquery</site.installationModule>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>gcloud-java-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>gcloud-java-storage</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.apis</groupId>
+      <artifactId>google-api-services-bigquery</artifactId>
+      <version>v2-rev254-1.21.0</version>
+      <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava-jdk5</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.12</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <version>3.3</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java
new file mode 100644
index 000000000000..b8e9926ce8c8
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java
@@ -0,0 +1,444 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.Dataset.Access;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Access Control for a BigQuery Dataset. BigQuery uses ACLs to manage permissions on datasets. ACLs
+ * are not directly supported on tables. A table inherits its ACL from the dataset that contains it.
+ * Project roles affect your ability to run jobs or manage the project, while dataset roles affect
+ * how you can access or modify the data inside of a project.
+ *
+ * @see Access Control
+ */
+public final class Acl implements Serializable {
+
+  private static final long serialVersionUID = 8357269726277191556L;
+
+  private final Entity entity;
+  private final Role role;
+
+  /**
+   * Dataset roles supported by BigQuery.
+   *
+   * @see Dataset Roles
+   */
+  public enum Role {
+    /**
+     * Can read, query, copy or export tables in the dataset.
+     */
+    READER,
+
+    /**
+     * Same as {@link #READER} plus can edit or append data in the dataset.
+     */
+    WRITER,
+
+    /**
+     * Same as {@link #WRITER} plus can update and delete the dataset.
+     */
+    OWNER
+  }
+
+  /**
+   * Base class for BigQuery entities that can be granted access to the dataset.
+   */
+  public abstract static class Entity implements Serializable {
+
+    private static final long serialVersionUID = 8111776788607959944L;
+
+    private final Type type;
+
+    /**
+     * Types of BigQuery entities.
+     */
+    public enum Type {
+      DOMAIN, GROUP, USER, VIEW
+    }
+
+    Entity(Type type) {
+      this.type = type;
+    }
+
+    public Type type() {
+      return type;
+    }
+
+    abstract Access toPb();
+
+    static Entity fromPb(Access access) {
+      if (access.getDomain() != null) {
+        return new Domain(access.getDomain());
+      }
+      if (access.getGroupByEmail() != null) {
+        return new Group(access.getGroupByEmail());
+      }
+      if (access.getSpecialGroup() != null) {
+        return new Group(access.getSpecialGroup());
+      }
+      if (access.getUserByEmail() != null) {
+        return new User(access.getUserByEmail());
+      }
+      if (access.getView() != null) {
+        return new View(TableId.fromPb(access.getView()));
+      }
+      // Unreachable
+      throw new BigQueryException(BigQueryException.UNKNOWN_CODE,
+          "Unrecognized access configuration");
+    }
+  }
+
+  /**
+   * Class for a BigQuery Domain entity. Objects of this class represent a domain to grant access
+   * to. Any users signed in with the domain specified will be granted the specified access.
+   */
+  public static final class Domain extends Entity {
+
+    private static final long serialVersionUID = -3033025857280447253L;
+
+    private final String domain;
+
+    /**
+     * Creates a Domain entity given the domain name.
+     */
+    public Domain(String domain) {
+      super(Type.DOMAIN);
+      this.domain = domain;
+    }
+
+    /**
+     * Returns the domain name.
+     */
+    public String domain() {
+      return domain;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null || getClass() != obj.getClass()) {
+        return false;
+      }
+      Domain domainEntity = (Domain) obj;
+      return Objects.equals(type(), domainEntity.type())
+          && Objects.equals(domain, domainEntity.domain());
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(type(), domain);
+    }
+
+    @Override
+    public String toString() {
+      return toPb().toString();
+    }
+
+    @Override
+    Access toPb() {
+      return new Access().setDomain(domain);
+    }
+  }
+
+  /**
+   * Class for a BigQuery Group entity. Objects of this class represent a group to grant access
+   * to. A Group entity can be created given the group's email or can be a special group:
+   * {@link #ofProjectOwners()}, {@link #ofProjectReaders()}, {@link #ofProjectWriters()} or
+   * {@link #ofAllAuthenticatedUsers()}.
+   */
+  public static final class Group extends Entity {
+
+    private static final String PROJECT_OWNERS = "projectOwners";
+    private static final String PROJECT_READERS = "projectReaders";
+    private static final String PROJECT_WRITERS = "projectWriters";
+    private static final String ALL_AUTHENTICATED_USERS = "allAuthenticatedUsers";
+    private static final long serialVersionUID = 5146829352398103029L;
+
+    private final String identifier;
+
+    /**
+     * Creates a Group entity given its identifier. The identifier can be either a special group
+     * identifier or a group email.
+     */
+    public Group(String identifier) {
+      super(Type.GROUP);
+      this.identifier = identifier;
+    }
+
+    /**
+     * Returns the group's identifier, which can be either a special group identifier or a group
+     * email.
+     */
+    public String identifier() {
+      return identifier;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null || getClass() != obj.getClass()) {
+        return false;
+      }
+      Group group = (Group) obj;
+      return Objects.equals(type(), group.type()) && Objects.equals(identifier, group.identifier);
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(type(), identifier);
+    }
+
+    @Override
+    public String toString() {
+      return toPb().toString();
+    }
+
+    @Override
+    Access toPb() {
+      switch (identifier) {
+        case PROJECT_OWNERS:
+          return new Access().setSpecialGroup(PROJECT_OWNERS);
+        case PROJECT_READERS:
+          return new Access().setSpecialGroup(PROJECT_READERS);
+        case PROJECT_WRITERS:
+          return new Access().setSpecialGroup(PROJECT_WRITERS);
+        case ALL_AUTHENTICATED_USERS:
+          return new Access().setSpecialGroup(ALL_AUTHENTICATED_USERS);
+        default:
+          return new Access().setGroupByEmail(identifier);
+      }
+    }
+
+    /**
+     * Returns a Group entity representing all of the project's owners.
+     */
+    public static Group ofProjectOwners() {
+      return new Group(PROJECT_OWNERS);
+    }
+
+    /**
+     * Returns a Group entity representing all of the project's readers.
+     */
+    public static Group ofProjectReaders() {
+      return new Group(PROJECT_READERS);
+    }
+
+    /**
+     * Returns a Group entity representing all of the project's writers.
+     */
+    public static Group ofProjectWriters() {
+      return new Group(PROJECT_WRITERS);
+    }
+
+    /**
+     * Returns a Group entity representing all BigQuery authenticated users.
+     */
+    public static Group ofAllAuthenticatedUsers() {
+      return new Group(ALL_AUTHENTICATED_USERS);
+    }
+  }
+
+  /**
+   * Class for a BigQuery User entity. Objects of this class represent a user to grant access to
+   * given the email address.
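+   *
+   * <p>A usage sketch (the email address is a hypothetical example):
+   * <pre> {@code
+   * Acl acl = Acl.of(new User("user@example.com"), Role.READER);
+   * }</pre>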
+   */
+  public static final class User extends Entity {
+
+    private static final long serialVersionUID = -4942821351073996141L;
+
+    private final String email;
+
+    /**
+     * Creates a User entity given the user's email.
+     */
+    public User(String email) {
+      super(Type.USER);
+      this.email = email;
+    }
+
+    /**
+     * Returns the user's email.
+     */
+    public String email() {
+      return email;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null || getClass() != obj.getClass()) {
+        return false;
+      }
+      User user = (User) obj;
+      return Objects.equals(type(), user.type()) && Objects.equals(email, user.email);
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(type(), email);
+    }
+
+    @Override
+    public String toString() {
+      return toPb().toString();
+    }
+
+    @Override
+    Access toPb() {
+      return new Access().setUserByEmail(email);
+    }
+  }
+
+  /**
+   * Class for a BigQuery View entity. Objects of this class represent a view from a different
+   * dataset to grant access to. Queries executed against that view will have read access to tables
+   * in this dataset. The role field is not required when this field is set. If that view is updated
+   * by any user, access to the view needs to be granted again via an update operation.
+   */
+  public static final class View extends Entity {
+
+    private final TableId id;
+
+    /**
+     * Creates a View entity given the view's id.
+     */
+    public View(TableId id) {
+      super(Type.VIEW);
+      this.id = id;
+    }
+
+    /**
+     * Returns the table's identity.
+     */
+    public TableId id() {
+      return id;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null || getClass() != obj.getClass()) {
+        return false;
+      }
+      View view = (View) obj;
+      return Objects.equals(type(), view.type()) && Objects.equals(id, view.id);
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(type(), id);
+    }
+
+    @Override
+    public String toString() {
+      return toPb().toString();
+    }
+
+    @Override
+    Access toPb() {
+      return new Access().setView(id.toPb());
+    }
+  }
+
+  private Acl(Entity entity, Role role) {
+    this.entity = checkNotNull(entity);
+    this.role = role;
+  }
+
+  /**
+   * Returns the entity for this ACL.
+   */
+  public Entity entity() {
+    return entity;
+  }
+
+  /**
+   * Returns the role specified by this ACL.
+   */
+  public Role role() {
+    return role;
+  }
+
+  /**
+   * Returns an Acl object.
+   *
+   * @param entity the entity for this ACL object
+   * @param role the role to associate to the {@code entity} object
+   */
+  public static Acl of(Entity entity, Role role) {
+    return new Acl(entity, role);
+  }
+
+  /**
+   * Returns an Acl object for a view entity.
+   */
+  public static Acl of(View view) {
+    return new Acl(view, null);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(entity, role);
+  }
+
+  @Override
+  public String toString() {
+    return toPb().toString();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }
+    final Acl other = (Acl) obj;
+    return Objects.equals(this.entity, other.entity)
+        && Objects.equals(this.role, other.role);
+  }
+
+  Access toPb() {
+    Access accessPb = entity.toPb();
+    if (role != null) {
+      accessPb.setRole(role.name());
+    }
+    return accessPb;
+  }
+
+  static Acl fromPb(Access access) {
+    return Acl.of(Entity.fromPb(access),
+        access.getRole() != null ? Role.valueOf(access.getRole()) : null);
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BaseTableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BaseTableInfo.java
new file mode 100644
index 000000000000..8bb30f025c06
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BaseTableInfo.java
@@ -0,0 +1,439 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.client.util.Data;
+import com.google.api.services.bigquery.model.Table;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+import com.google.common.base.MoreObjects.ToStringHelper;
+
+import java.io.Serializable;
+import java.math.BigInteger;
+import java.util.Objects;
+
+/**
+ * Base class for Google BigQuery table information. Use {@link TableInfo} for a simple BigQuery
+ * Table. Use {@link ViewInfo} for a BigQuery View Table. Use {@link ExternalTableInfo} for a
+ * BigQuery Table backed by external data.
+ *
+ * @see Managing Tables
+ */
+public abstract class BaseTableInfo implements Serializable {
+
+  static final Function<Table, BaseTableInfo> FROM_PB_FUNCTION =
+      new Function<Table, BaseTableInfo>() {
+        @Override
+        public BaseTableInfo apply(Table pb) {
+          return BaseTableInfo.fromPb(pb);
+        }
+      };
+  static final Function<BaseTableInfo, Table> TO_PB_FUNCTION =
+      new Function<BaseTableInfo, Table>() {
+        @Override
+        public Table apply(BaseTableInfo tableInfo) {
+          return tableInfo.toPb();
+        }
+      };
+
+  private static final long serialVersionUID = -7679032506430816205L;
+
+  /**
+   * The table type.
+   */
+  public enum Type {
+    /**
+     * A normal BigQuery table.
+     */
+    TABLE,
+
+    /**
+     * A virtual table defined by a SQL query.
+     *
+     * @see Views
+     */
+    VIEW,
+
+    /**
+     * A BigQuery table backed by external data.
+     *
+     * @see Federated Data Sources
+     */
+    EXTERNAL
+  }
+
+  private final String etag;
+  private final String id;
+  private final String selfLink;
+  private final TableId tableId;
+  private final Type type;
+  private final Schema schema;
+  private final String friendlyName;
+  private final String description;
+  private final Long numBytes;
+  private final Long numRows;
+  private final Long creationTime;
+  private final Long expirationTime;
+  private final Long lastModifiedTime;
+
+  /**
+   * Base builder for tables.
+   *
+   * @param <T> the table type
+   * @param <B> the table builder
+   */
+  public abstract static class Builder<T extends BaseTableInfo, B extends Builder<T, B>> {
+
+    private String etag;
+    private String id;
+    private String selfLink;
+    private TableId tableId;
+    private Type type;
+    private Schema schema;
+    private String friendlyName;
+    private String description;
+    private Long numBytes;
+    private Long numRows;
+    private Long creationTime;
+    private Long expirationTime;
+    private Long lastModifiedTime;
+
+    protected Builder() {}
+
+    protected Builder(BaseTableInfo tableInfo) {
+      this.etag = tableInfo.etag;
+      this.id = tableInfo.id;
+      this.selfLink = tableInfo.selfLink;
+      this.tableId = tableInfo.tableId;
+      this.type = tableInfo.type;
+      this.schema = tableInfo.schema;
+      this.friendlyName = tableInfo.friendlyName;
+      this.description = tableInfo.description;
+      this.numBytes = tableInfo.numBytes;
+      this.numRows = tableInfo.numRows;
+      this.creationTime = tableInfo.creationTime;
+      this.expirationTime = tableInfo.expirationTime;
+      this.lastModifiedTime = tableInfo.lastModifiedTime;
+    }
+
+    protected Builder(Table tablePb) {
+      this.type = Type.valueOf(tablePb.getType());
+      this.tableId = TableId.fromPb(tablePb.getTableReference());
+      if (tablePb.getSchema() != null) {
+        this.schema(Schema.fromPb(tablePb.getSchema()));
+      }
+      if (tablePb.getLastModifiedTime() != null) {
+        this.lastModifiedTime(tablePb.getLastModifiedTime().longValue());
+      }
+      if (tablePb.getNumRows() != null) {
+        this.numRows(tablePb.getNumRows().longValue());
+      }
+      this.description = tablePb.getDescription();
+      this.expirationTime = tablePb.getExpirationTime();
+      this.friendlyName = tablePb.getFriendlyName();
+      this.creationTime = tablePb.getCreationTime();
+      this.etag = tablePb.getEtag();
+      this.id = tablePb.getId();
+      this.numBytes = tablePb.getNumBytes();
+      this.selfLink = tablePb.getSelfLink();
+    }
+
+    @SuppressWarnings("unchecked")
+    protected B self() {
+      return (B) this;
+    }
+
+    B creationTime(Long creationTime) {
+      this.creationTime = creationTime;
+      return self();
+    }
+
+    /**
+     * Sets a user-friendly description for the table.
+     */
+    public B description(String description) {
+      this.description = firstNonNull(description, Data.nullOf(String.class));
+      return self();
+    }
+
+    B etag(String etag) {
+      this.etag = etag;
+      return self();
+    }
+
+    /**
+     * Sets the time when this table expires, in milliseconds since the epoch. If not present, the
+     * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed.
+     */
+    public B expirationTime(Long expirationTime) {
+      this.expirationTime = firstNonNull(expirationTime, Data.nullOf(Long.class));
+      return self();
+    }
+
+    /**
+     * Sets a user-friendly name for the table.
+     */
+    public B friendlyName(String friendlyName) {
+      this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class));
+      return self();
+    }
+
+    B id(String id) {
+      this.id = id;
+      return self();
+    }
+
+    B lastModifiedTime(Long lastModifiedTime) {
+      this.lastModifiedTime = lastModifiedTime;
+      return self();
+    }
+
+    B numBytes(Long numBytes) {
+      this.numBytes = numBytes;
+      return self();
+    }
+
+    B numRows(Long numRows) {
+      this.numRows = numRows;
+      return self();
+    }
+
+    B selfLink(String selfLink) {
+      this.selfLink = selfLink;
+      return self();
+    }
+
+    /**
+     * Sets the table identity.
+     */
+    public B tableId(TableId tableId) {
+      this.tableId = checkNotNull(tableId);
+      return self();
+    }
+
+    B type(Type type) {
+      this.type = type;
+      return self();
+    }
+
+    /**
+     * Sets the table schema.
+     */
+    public B schema(Schema schema) {
+      this.schema = checkNotNull(schema);
+      return self();
+    }
+
+    /**
+     * Creates an object.
+     */
+    public abstract T build();
+  }
+
+  protected BaseTableInfo(Builder builder) {
+    this.tableId = checkNotNull(builder.tableId);
+    this.etag = builder.etag;
+    this.id = builder.id;
+    this.selfLink = builder.selfLink;
+    this.friendlyName = builder.friendlyName;
+    this.description = builder.description;
+    this.type = builder.type;
+    this.schema = builder.schema;
+    this.numBytes = builder.numBytes;
+    this.numRows = builder.numRows;
+    this.creationTime = builder.creationTime;
+    this.expirationTime = builder.expirationTime;
+    this.lastModifiedTime = builder.lastModifiedTime;
+  }
+
+  /**
+   * Returns the hash of the table resource.
+   */
+  public String etag() {
+    return etag;
+  }
+
+  /**
+   * Returns an opaque id for the table.
+   */
+  public String id() {
+    return id;
+  }
+
+  /**
+   * Returns the table's type. If this table is a simple table this method returns
+   * {@link Type#TABLE}. If this table is an external table this method returns
+   * {@link Type#EXTERNAL}. If this table is a view table this method returns {@link Type#VIEW}.
+   */
+  public Type type() {
+    return type;
+  }
+
+  /**
+   * Returns the table's schema.
+   */
+  public Schema schema() {
+    return schema;
+  }
+
+  /**
+   * Returns a URL that can be used to access the resource again. The returned URL can be used for
+   * get or update requests.
+   */
+  public String selfLink() {
+    return selfLink;
+  }
+
+  /**
+   * Returns the table identity.
+   */
+  public TableId tableId() {
+    return tableId;
+  }
+
+  /**
+   * Returns a user-friendly name for the table.
+   */
+  public String friendlyName() {
+    return Data.isNull(friendlyName) ? null : friendlyName;
+  }
+
+  /**
+   * Returns a user-friendly description for the table.
+   */
+  public String description() {
+    return Data.isNull(description) ? null : description;
+  }
+
+  /**
+   * Returns the size of this table in bytes, excluding any data in the streaming buffer.
+   */
+  public Long numBytes() {
+    return numBytes;
+  }
+
+  /**
+   * Returns the number of rows in this table, excluding any data in the streaming buffer.
+   */
+  public Long numRows() {
+    return numRows;
+  }
+
+  /**
+   * Returns the time when this table was created, in milliseconds since the epoch.
+   */
+  public Long creationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Returns the time when this table expires, in milliseconds since the epoch. If not present, the
+   * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed.
+   */
+  public Long expirationTime() {
+    return Data.isNull(expirationTime) ? null : expirationTime;
+  }
+
+  /**
+   * Returns the time when this table was last modified, in milliseconds since the epoch.
+   */
+  public Long lastModifiedTime() {
+    return lastModifiedTime;
+  }
+
+  /**
+   * Returns a builder for the object.
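+   *
+   * <p>For example, a sketch of changing a table's description via the builder (the new
+   * description is a hypothetical example):
+   * <pre> {@code
+   * BaseTableInfo updated = tableInfo.toBuilder().description("new description").build();
+   * }</pre>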
+   */
+  public abstract Builder toBuilder();
+
+  ToStringHelper toStringHelper() {
+    return MoreObjects.toStringHelper(this)
+        .add("tableId", tableId)
+        .add("type", type)
+        .add("schema", schema)
+        .add("etag", etag)
+        .add("id", id)
+        .add("selfLink", selfLink)
+        .add("friendlyName", friendlyName)
+        .add("description", description)
+        .add("numBytes", numBytes)
+        .add("numRows", numRows)
+        .add("expirationTime", expirationTime)
+        .add("creationTime", creationTime)
+        .add("lastModifiedTime", lastModifiedTime);
+  }
+
+  @Override
+  public String toString() {
+    return toStringHelper().toString();
+  }
+
+  protected final int baseHashCode() {
+    return Objects.hash(tableId);
+  }
+
+  protected final boolean baseEquals(BaseTableInfo tableInfo) {
+    return Objects.equals(toPb(), tableInfo.toPb());
+  }
+
+  BaseTableInfo setProjectId(String projectId) {
+    return toBuilder().tableId(tableId().setProjectId(projectId)).build();
+  }
+
+  Table toPb() {
+    Table tablePb = new Table();
+    tablePb.setTableReference(tableId.toPb());
+    if (lastModifiedTime != null) {
+      tablePb.setLastModifiedTime(BigInteger.valueOf(lastModifiedTime));
+    }
+    if (numRows != null) {
+      tablePb.setNumRows(BigInteger.valueOf(numRows));
+    }
+    if (schema != null) {
+      tablePb.setSchema(schema.toPb());
+    }
+    tablePb.setType(type.name());
+    tablePb.setCreationTime(creationTime);
+    tablePb.setDescription(description);
+    tablePb.setEtag(etag);
+    tablePb.setExpirationTime(expirationTime);
+    tablePb.setFriendlyName(friendlyName);
+    tablePb.setId(id);
+    tablePb.setNumBytes(numBytes);
+    tablePb.setSelfLink(selfLink);
+    return tablePb;
+  }
+
+  @SuppressWarnings("unchecked")
+  static <T extends BaseTableInfo> T fromPb(Table tablePb) {
+    switch (Type.valueOf(tablePb.getType())) {
+      case TABLE:
+        return (T) TableInfo.fromPb(tablePb);
+      case VIEW:
+        return (T) ViewInfo.fromPb(tablePb);
+      case EXTERNAL:
+        return (T) ExternalTableInfo.fromPb(tablePb);
+      default:
+        // never reached
+        throw new IllegalArgumentException("Format " + tablePb.getType() + " is not supported");
+    }
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java
new file mode 100644
index 000000000000..6bc6a2ebabb5
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java
@@ -0,0 +1,674 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.google.gcloud.Page;
+import com.google.gcloud.Service;
+import com.google.gcloud.spi.BigQueryRpc;
+
+import java.util.List;
+import java.util.Set;
+
+/**
+ * An interface for Google Cloud BigQuery.
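+ *
+ * <p>A minimal usage sketch, assuming default credentials and a project id can be resolved from
+ * the environment ({@code "my_dataset_id"} is a hypothetical dataset):
+ * <pre> {@code
+ * BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+ * DatasetInfo dataset = bigquery.getDataset("my_dataset_id");
+ * }</pre>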
+ *
+ * @see <a href="https://cloud.google.com/bigquery">Google Cloud BigQuery</a>
+ */
+public interface BigQuery extends Service<BigQueryOptions> {
+
+  /**
+   * Fields of a BigQuery Dataset resource.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets#resource">Dataset
+   *     Resource</a>
+   */
+  enum DatasetField {
+    ACCESS("access"),
+    CREATION_TIME("creationTime"),
+    DATASET_REFERENCE("datasetReference"),
+    DEFAULT_TABLE_EXPIRATION_MS("defaultTableExpirationMs"),
+    DESCRIPTION("description"),
+    ETAG("etag"),
+    FRIENDLY_NAME("friendlyName"),
+    ID("id"),
+    LAST_MODIFIED_TIME("lastModifiedTime"),
+    LOCATION("location"),
+    SELF_LINK("selfLink");
+
+    private final String selector;
+
+    DatasetField(String selector) {
+      this.selector = selector;
+    }
+
+    public String selector() {
+      return selector;
+    }
+
+    static String selector(DatasetField... fields) {
+      Set<String> fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1);
+      fieldStrings.add(DATASET_REFERENCE.selector());
+      for (DatasetField field : fields) {
+        fieldStrings.add(field.selector());
+      }
+      return Joiner.on(',').join(fieldStrings);
+    }
+  }
+
+  /**
+   * Fields of a BigQuery Table resource.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables#resource">Table
+   *     Resource</a>
+   */
+  enum TableField {
+    CREATION_TIME("creationTime"),
+    DESCRIPTION("description"),
+    ETAG("etag"),
+    EXPIRATION_TIME("expirationTime"),
+    EXTERNAL_DATA_CONFIGURATION("externalDataConfiguration"),
+    FRIENDLY_NAME("friendlyName"),
+    ID("id"),
+    LAST_MODIFIED_TIME("lastModifiedTime"),
+    LOCATION("location"),
+    NUM_BYTES("numBytes"),
+    NUM_ROWS("numRows"),
+    SCHEMA("schema"),
+    SELF_LINK("selfLink"),
+    STREAMING_BUFFER("streamingBuffer"),
+    TABLE_REFERENCE("tableReference"),
+    TYPE("type"),
+    VIEW("view");
+
+    private final String selector;
+
+    TableField(String selector) {
+      this.selector = selector;
+    }
+
+    public String selector() {
+      return selector;
+    }
+
+    static String selector(TableField... fields) {
+      Set<String> fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2);
+      fieldStrings.add(TABLE_REFERENCE.selector());
+      fieldStrings.add(TYPE.selector());
+      for (TableField field : fields) {
+        fieldStrings.add(field.selector());
+      }
+      return Joiner.on(',').join(fieldStrings);
+    }
+  }
+
+  /**
+   * Fields of a BigQuery Job resource.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#resource">Job
+   *     Resource</a>
+   */
+  enum JobField {
+    CONFIGURATION("configuration"),
+    ETAG("etag"),
+    ID("id"),
+    JOB_REFERENCE("jobReference"),
+    SELF_LINK("selfLink"),
+    STATISTICS("statistics"),
+    STATUS("status"),
+    USER_EMAIL("user_email");
+
+    private final String selector;
+
+    JobField(String selector) {
+      this.selector = selector;
+    }
+
+    public String selector() {
+      return selector;
+    }
+
+    static String selector(JobField... fields) {
+      Set<String> fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2);
+      fieldStrings.add(JOB_REFERENCE.selector());
+      fieldStrings.add(CONFIGURATION.selector());
+      for (JobField field : fields) {
+        fieldStrings.add(field.selector());
+      }
+      return Joiner.on(',').join(fieldStrings);
+    }
+  }
+
+  /**
+   * Class for specifying dataset list options.
+   */
+  class DatasetListOption extends Option {
+
+    private static final long serialVersionUID = 8660294969063340498L;
+
+    private DatasetListOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the maximum number of datasets to be returned.
+     */
+    public static DatasetListOption maxResults(long maxResults) {
+      return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults);
+    }
+
+    /**
+     * Returns an option to specify the page token from which to start listing datasets.
+     */
+    public static DatasetListOption startPageToken(String pageToken) {
+      return new DatasetListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+    }
+
+    /**
+     * Returns an option to list all datasets, even hidden ones.
+     */
+    public static DatasetListOption all() {
+      return new DatasetListOption(BigQueryRpc.Option.ALL_DATASETS, true);
+    }
+  }
+
+  /**
+   * Class for specifying dataset get, create and update options.
+   */
+  class DatasetOption extends Option {
+
+    private static final long serialVersionUID = 1674133909259913250L;
+
+    private DatasetOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the dataset's fields to be returned by the RPC call. If this
+     * option is not provided, all the dataset's fields are returned. {@code DatasetOption.fields}
+     * can be used to specify only the fields of interest. {@link DatasetInfo#datasetId()} is
+     * always returned, even if not specified.
+     */
+    public static DatasetOption fields(DatasetField... fields) {
+      return new DatasetOption(BigQueryRpc.Option.FIELDS, DatasetField.selector(fields));
+    }
+  }
+
+  /**
+   * Class for specifying dataset delete options.
+   */
+  class DatasetDeleteOption extends Option {
+
+    private static final long serialVersionUID = -7166083569900951337L;
+
+    private DatasetDeleteOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to delete a dataset even if non-empty. If not provided, attempting to
+     * delete a non-empty dataset will result in a {@link BigQueryException} being thrown.
+     */
+    public static DatasetDeleteOption deleteContents() {
+      return new DatasetDeleteOption(BigQueryRpc.Option.DELETE_CONTENTS, true);
+    }
+  }
+
+  /**
+   * Class for specifying table list options.
+   */
+  class TableListOption extends Option {
+
+    private static final long serialVersionUID = 8660294969063340498L;
+
+    private TableListOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the maximum number of tables to be returned.
+     */
+    public static TableListOption maxResults(long maxResults) {
+      checkArgument(maxResults >= 0);
+      return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults);
+    }
+
+    /**
+     * Returns an option to specify the page token from which to start listing tables.
+     */
+    public static TableListOption startPageToken(String pageToken) {
+      return new TableListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+    }
+  }
+
+  /**
+   * Class for specifying table get, create and update options.
+   */
+  class TableOption extends Option {
+
+    private static final long serialVersionUID = -1723870134095936772L;
+
+    private TableOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the table's fields to be returned by the RPC call. If this
+     * option is not provided, all the table's fields are returned. {@code TableOption.fields} can
+     * be used to specify only the fields of interest. {@link BaseTableInfo#tableId()} and
+     * {@link BaseTableInfo#type()} are always returned, even if not specified.
+     */
+    public static TableOption fields(TableField... fields) {
+      return new TableOption(BigQueryRpc.Option.FIELDS, TableField.selector(fields));
+    }
+  }
+
+  /**
+   * Class for specifying table data list options.
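+   *
+   * <p>A usage sketch (hypothetical dataset and table names):
+   * <pre> {@code
+   * // limit each page to at most 100 rows
+   * bigquery.listTableData("my_dataset", "my_table", TableDataListOption.maxResults(100));
+   * }</pre>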
+   */
+  class TableDataListOption extends Option {
+
+    private static final long serialVersionUID = 8488823381738864434L;
+
+    private TableDataListOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the maximum number of rows to be returned.
+     */
+    public static TableDataListOption maxResults(long maxResults) {
+      checkArgument(maxResults >= 0);
+      return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults);
+    }
+
+    /**
+     * Returns an option to specify the page token from which to start listing table data.
+     */
+    public static TableDataListOption startPageToken(String pageToken) {
+      return new TableDataListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+    }
+
+    /**
+     * Returns an option that sets the zero-based index of the row from which to start listing
+     * table data.
+     */
+    public static TableDataListOption startIndex(long index) {
+      checkArgument(index >= 0);
+      return new TableDataListOption(BigQueryRpc.Option.START_INDEX, index);
+    }
+  }
+
+  /**
+   * Class for specifying job list options.
+   */
+  class JobListOption extends Option {
+
+    private static final long serialVersionUID = -8207122131226481423L;
+
+    private JobListOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to list all jobs, even the ones issued by other users.
+     */
+    public static JobListOption allUsers() {
+      return new JobListOption(BigQueryRpc.Option.ALL_USERS, true);
+    }
+
+    /**
+     * Returns an option to list only jobs that match the provided state filters.
+     */
+    public static JobListOption stateFilter(JobStatus.State... stateFilters) {
+      List<String> stringFilters = Lists.transform(ImmutableList.copyOf(stateFilters),
+          new Function<JobStatus.State, String>() {
+            @Override
+            public String apply(JobStatus.State state) {
+              return state.name().toLowerCase();
+            }
+          });
+      return new JobListOption(BigQueryRpc.Option.STATE_FILTER, stringFilters);
+    }
+
+    /**
+     * Returns an option to specify the maximum number of jobs to be returned.
+     */
+    public static JobListOption maxResults(long maxResults) {
+      checkArgument(maxResults >= 0);
+      return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults);
+    }
+
+    /**
+     * Returns an option to specify the page token from which to start listing jobs.
+     */
+    public static JobListOption startPageToken(String pageToken) {
+      return new JobListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+    }
+
+    /**
+     * Returns an option to specify the job's fields to be returned by the RPC call. If this option
+     * is not provided, all the job's fields are returned. {@code JobOption.fields()} can be used
+     * to specify only the fields of interest. {@link JobInfo#jobId()}, {@link JobStatus#state()},
+     * {@link JobStatus#error()} as well as type-specific configuration (e.g.
+     * {@link QueryJobConfiguration#query()} for Query Jobs) are always returned, even if not
+     * specified. {@link JobField#SELF_LINK} and {@link JobField#ETAG} cannot be selected when
+     * listing jobs.
+     */
+    public static JobListOption fields(JobField... fields) {
+      String selector = JobField.selector(fields);
+      StringBuilder builder = new StringBuilder();
+      builder.append("etag,jobs(").append(selector).append(",state,errorResult),nextPageToken");
+      return new JobListOption(BigQueryRpc.Option.FIELDS, builder.toString());
+    }
+  }
+
+  /**
+   * Class for specifying job get and create options.
+   */
+  class JobOption extends Option {
+
+    private static final long serialVersionUID = -3111736712316353665L;
+
+    private JobOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the job's fields to be returned by the RPC call. If this option
+     * is not provided, all the job's fields are returned. {@code JobOption.fields()} can be used
+     * to specify only the fields of interest. {@link JobInfo#jobId()} as well as type-specific
+     * configuration (e.g. {@link QueryJobConfiguration#query()} for Query Jobs) are always
+     * returned, even if not specified.
+     */
+    public static JobOption fields(JobField... fields) {
+      return new JobOption(BigQueryRpc.Option.FIELDS, JobField.selector(fields));
+    }
+  }
+
+  /**
+   * Class for specifying query results options.
+   */
+  class QueryResultsOption extends Option {
+
+    private static final long serialVersionUID = 3788898503226985525L;
+
+    private QueryResultsOption(BigQueryRpc.Option option, Object value) {
+      super(option, value);
+    }
+
+    /**
+     * Returns an option to specify the maximum number of rows to be returned.
+     */
+    public static QueryResultsOption maxResults(long maxResults) {
+      checkArgument(maxResults >= 0);
+      return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, maxResults);
+    }
+
+    /**
+     * Returns an option to specify the page token from which to start getting query results.
+     */
+    public static QueryResultsOption startPageToken(String pageToken) {
+      return new QueryResultsOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+    }
+
+    /**
+     * Returns an option that sets the zero-based index of the row from which to start getting
+     * query results.
+     */
+    public static QueryResultsOption startIndex(long startIndex) {
+      checkArgument(startIndex >= 0);
+      return new QueryResultsOption(BigQueryRpc.Option.START_INDEX, startIndex);
+    }
+
+    /**
+     * Returns an option that sets how long to wait for the query to complete, in milliseconds,
+     * before returning. Default is 10 seconds. If the timeout passes before the job completes,
+     * {@link QueryResponse#jobCompleted()} will be {@code false}.
+     */
+    public static QueryResultsOption maxWaitTime(long maxWaitTime) {
+      checkArgument(maxWaitTime >= 0);
+      return new QueryResultsOption(BigQueryRpc.Option.TIMEOUT, maxWaitTime);
+    }
+  }
+
+  /**
+   * Creates a new dataset.
+   *
+   * @throws BigQueryException upon failure
+   */
+  DatasetInfo create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException;
+
+  /**
+   * Creates a new table.
+   *
+   * @throws BigQueryException upon failure
+   */
+  <T extends BaseTableInfo> T create(T table, TableOption... options) throws BigQueryException;
+
+  /**
+   * Creates a new job.
+   *
+   * @throws BigQueryException upon failure
+   */
+  JobInfo create(JobInfo job, JobOption... options) throws BigQueryException;
+
+  /**
+   * Returns the requested dataset or {@code null} if not found.
+   *
+   * @throws BigQueryException upon failure
+   */
+  DatasetInfo getDataset(String datasetId, DatasetOption... options) throws BigQueryException;
+
+  /**
+   * Returns the requested dataset or {@code null} if not found.
+   *
+   * @throws BigQueryException upon failure
+   */
+  DatasetInfo getDataset(DatasetId datasetId, DatasetOption... options) throws BigQueryException;
+
+  /**
+   * Lists the project's datasets. This method returns partial information on each dataset
+   * ({@link DatasetInfo#datasetId()}, {@link DatasetInfo#friendlyName()} and
+   * {@link DatasetInfo#id()}).
+   * To get complete information use either
+   * {@link #getDataset(String, DatasetOption...)} or
+   * {@link #getDataset(DatasetId, DatasetOption...)}.
+   *
+   * @throws BigQueryException upon failure
+   */
+  Page<DatasetInfo> listDatasets(DatasetListOption... options) throws BigQueryException;
+
+  /**
+   * Deletes the requested dataset.
+   *
+   * @return {@code true} if dataset was deleted, {@code false} if it was not found
+   * @throws BigQueryException upon failure
+   */
+  boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException;
+
+  /**
+   * Deletes the requested dataset.
+   *
+   * @return {@code true} if dataset was deleted, {@code false} if it was not found
+   * @throws BigQueryException upon failure
+   */
+  boolean delete(DatasetId datasetId, DatasetDeleteOption... options) throws BigQueryException;
+
+  /**
+   * Deletes the requested table.
+   *
+   * @return {@code true} if table was deleted, {@code false} if it was not found
+   * @throws BigQueryException upon failure
+   */
+  boolean delete(String datasetId, String tableId) throws BigQueryException;
+
+  /**
+   * Deletes the requested table.
+   *
+   * @return {@code true} if table was deleted, {@code false} if it was not found
+   * @throws BigQueryException upon failure
+   */
+  boolean delete(TableId tableId) throws BigQueryException;
+
+  /**
+   * Updates dataset information.
+   *
+   * @throws BigQueryException upon failure
+   */
+  DatasetInfo update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException;
+
+  /**
+   * Updates table information.
+   *
+   * @throws BigQueryException upon failure
+   */
+  <T extends BaseTableInfo> T update(T table, TableOption... options) throws BigQueryException;
+
+  /**
+   * Returns the requested table or {@code null} if not found.
+   *
+   * @throws BigQueryException upon failure
+   */
+  <T extends BaseTableInfo> T getTable(String datasetId, String tableId, TableOption... options)
+      throws BigQueryException;
+
+  /**
+   * Returns the requested table or {@code null} if not found.
+   *
+   * @throws BigQueryException upon failure
+   */
+  <T extends BaseTableInfo> T getTable(TableId tableId, TableOption... options)
+      throws BigQueryException;
+
+  /**
+   * Lists the tables in the dataset. This method returns partial information on each table
+   * ({@link BaseTableInfo#tableId()}, {@link BaseTableInfo#friendlyName()},
+   * {@link BaseTableInfo#id()} and {@link BaseTableInfo#type()}). To get complete information use
+   * either {@link #getTable(TableId, TableOption...)} or
+   * {@link #getTable(String, String, TableOption...)}.
+   *
+   * @throws BigQueryException upon failure
+   */
+  Page<BaseTableInfo> listTables(String datasetId, TableListOption... options)
+      throws BigQueryException;
+
+  /**
+   * Lists the tables in the dataset. This method returns partial information on each table
+   * ({@link BaseTableInfo#tableId()}, {@link BaseTableInfo#friendlyName()},
+   * {@link BaseTableInfo#id()} and {@link BaseTableInfo#type()}). To get complete information use
+   * either {@link #getTable(TableId, TableOption...)} or
+   * {@link #getTable(String, String, TableOption...)}.
+   *
+   * @throws BigQueryException upon failure
+   */
+  Page<BaseTableInfo> listTables(DatasetId datasetId, TableListOption... options)
+      throws BigQueryException;
+
+  /**
+   * Sends an insert all request.
+   *
+   * @throws BigQueryException upon failure
+   */
+  InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException;
+
+  /**
+   * Lists the table's rows.
+   *
+   * @throws BigQueryException upon failure
+   */
+  Page<List<FieldValue>> listTableData(String datasetId, String tableId,
+      TableDataListOption... options) throws BigQueryException;
+
+  /**
+   * Lists the table's rows.
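+   *
+   * <p>A short sketch (hypothetical identifiers), iterating the first page of rows:
+   * <pre> {@code
+   * Page<List<FieldValue>> page = bigquery.listTableData(TableId.of("my_dataset", "my_table"));
+   * for (List<FieldValue> row : page.values()) {
+   *   // process the row
+   * }
+   * }</pre>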
+   *
+   * @throws BigQueryException upon failure
+   */
+  Page<List<FieldValue>> listTableData(TableId tableId, TableDataListOption... options)
+      throws BigQueryException;
+
+  /**
+   * Returns the requested job or {@code null} if not found.
+   *
+   * @throws BigQueryException upon failure
+   */
+  JobInfo getJob(String jobId, JobOption... options) throws BigQueryException;
+
+  /**
+   * Returns the requested job or {@code null} if not found.
+   *
+   * @throws BigQueryException upon failure
+   */
+  JobInfo getJob(JobId jobId, JobOption... options) throws BigQueryException;
+
+  /**
+   * Lists the jobs.
+   *
+   * @throws BigQueryException upon failure
+   */
+  Page<JobInfo> listJobs(JobListOption... options) throws BigQueryException;
+
+  /**
+   * Sends a job cancel request. This call will return immediately. The job status can then be
+   * checked using either {@link #getJob(JobId, JobOption...)} or
+   * {@link #getJob(String, JobOption...)}.
+   *
+   * @return {@code true} if cancel was requested successfully, {@code false} if the job was not
+   *     found
+   * @throws BigQueryException upon failure
+   */
+  boolean cancel(String jobId) throws BigQueryException;
+
+  /**
+   * Sends a job cancel request. This call will return immediately. The job status can then be
+   * checked using either {@link #getJob(JobId, JobOption...)} or
+   * {@link #getJob(String, JobOption...)}.
+   *
+   * @return {@code true} if cancel was requested successfully, {@code false} if the job was not
+   *     found
+   * @throws BigQueryException upon failure
+   */
+  boolean cancel(JobId jobId) throws BigQueryException;
+
+  /**
+   * Runs the query associated with the request.
+   *
+   * @throws BigQueryException upon failure
+   */
+  QueryResponse query(QueryRequest request) throws BigQueryException;
+
+  /**
+   * Returns results of the query associated with the provided job.
+   *
+   * @throws BigQueryException upon failure
+   */
+  QueryResponse getQueryResults(JobId job, QueryResultsOption... options) throws BigQueryException;
+
+  /**
+   * Returns a channel to write data to be inserted into a BigQuery table. Data format and other
+   * options can be configured using the {@link WriteChannelConfiguration} parameter.
+   *
+   * @throws BigQueryException upon failure
+   */
+  TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration);
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java
new file mode 100644
index 000000000000..e58f0d0b7213
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java
@@ -0,0 +1,125 @@
+package com.google.gcloud.bigquery;
+
+import com.google.api.services.bigquery.model.ErrorProto;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Google Cloud BigQuery Error. Objects of this class represent errors encountered by the BigQuery
+ * service while executing a request. A BigQuery Job that terminated with an error has a non-null
+ * {@link JobStatus#error()}. A job can also encounter errors during its execution that do not
+ * cause the whole job to fail (see {@link JobStatus#executionErrors()}). Similarly, queries and
+ * insert all requests can cause BigQuery errors that do not mean the whole operation failed (see
+ * {@link QueryResponse#executionErrors()} and {@link InsertAllResponse#insertErrors()}).
+ * When a {@link BigQueryException} is thrown, the BigQuery Error that caused it, if any, can be
+ * accessed with {@link BigQueryException#error()}.
+ */
+public class BigQueryError implements Serializable {
+
+  static final Function<ErrorProto, BigQueryError> FROM_PB_FUNCTION =
+      new Function<ErrorProto, BigQueryError>() {
+        @Override
+        public BigQueryError apply(ErrorProto pb) {
+          return BigQueryError.fromPb(pb);
+        }
+      };
+  static final Function<BigQueryError, ErrorProto> TO_PB_FUNCTION =
+      new Function<BigQueryError, ErrorProto>() {
+        @Override
+        public ErrorProto apply(BigQueryError error) {
+          return error.toPb();
+        }
+      };
+  private static final long serialVersionUID = -6566785320629096688L;
+
+  private final String reason;
+  private final String location;
+  private final String debugInfo;
+  private final String message;
+
+  public BigQueryError(String reason, String location, String message, String debugInfo) {
+    this.reason = reason;
+    this.location = location;
+    this.debugInfo = debugInfo;
+    this.message = message;
+  }
+
+  public BigQueryError(String reason, String location, String message) {
+    this.reason = reason;
+    this.location = location;
+    this.message = message;
+    this.debugInfo = null;
+  }
+
+  /**
+   * Returns a short error code that summarizes the error.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/troubleshooting-errors">Troubleshooting
+   *     Errors</a>
+   */
+  public String reason() {
+    return reason;
+  }
+
+  /**
+   * Returns where the error occurred, if present.
+   */
+  public String location() {
+    return location;
+  }
+
+  String debugInfo() {
+    return debugInfo;
+  }
+
+  /**
+   * Returns a human-readable description of the error.
+   */
+  public String message() {
+    return message;
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(reason, location, message);
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("reason", reason)
+        .add("location", location)
+        .add("message", message)
+        .toString();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof BigQueryError && Objects.equals(toPb(), ((BigQueryError) obj).toPb());
+  }
+
+  ErrorProto toPb() {
+    ErrorProto errorPb = new ErrorProto();
+    if (reason != null) {
+      errorPb.setReason(reason);
+    }
+    if (location != null) {
+      errorPb.setLocation(location);
+    }
+    if (message != null) {
+      errorPb.setMessage(message);
+    }
+    if (debugInfo != null) {
+      errorPb.setDebugInfo(debugInfo);
+    }
+    return errorPb;
+  }
+
+  static BigQueryError fromPb(ErrorProto errorPb) {
+    return new BigQueryError(errorPb.getReason(), errorPb.getLocation(), errorPb.getMessage(),
+        errorPb.getDebugInfo());
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java
new file mode 100644
index 000000000000..a157afd25db2
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.gcloud.BaseServiceException;
+import com.google.gcloud.RetryHelper.RetryHelperException;
+import com.google.gcloud.RetryHelper.RetryInterruptedException;
+
+import java.io.IOException;
+import java.util.Set;
+
+/**
+ * BigQuery service exception.
+ *
+ * @see <a href="https://cloud.google.com/bigquery/troubleshooting-errors">Google Cloud
+ *     BigQuery error codes</a>
+ */
+public class BigQueryException extends BaseServiceException {
+
+  // see: https://cloud.google.com/bigquery/troubleshooting-errors
+  private static final Set<Error> RETRYABLE_ERRORS = ImmutableSet.of(
+      new Error(500, null),
+      new Error(502, null),
+      new Error(503, null),
+      new Error(504, null));
+  private static final long serialVersionUID = -5006625989225438209L;
+
+  private final BigQueryError error;
+
+  public BigQueryException(int code, String message) {
+    this(code, message, null);
+  }
+
+  public BigQueryException(int code, String message, BigQueryError error) {
+    super(code, message, error != null ? error.reason() : null, true);
+    this.error = error;
+  }
+
+  public BigQueryException(IOException exception) {
+    super(exception, true);
+    BigQueryError error = null;
+    if (reason() != null) {
+      error = new BigQueryError(reason(), location(), getMessage(), debugInfo());
+    }
+    this.error = error;
+  }
+
+  /**
+   * Returns the {@link BigQueryError} that caused this exception. Returns {@code null} if none
+   * exists.
+   */
+  public BigQueryError error() {
+    return error;
+  }
+
+  @Override
+  protected Set<Error> retryableErrors() {
+    return RETRYABLE_ERRORS;
+  }
+
+  /**
+   * Translates a {@code RetryHelperException} to the {@code BigQueryException} that caused the
+   * error. This method always throws an exception.
+   *
+   * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException}
+   * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException}
+   */
+  static BaseServiceException translateAndThrow(RetryHelperException ex) {
+    BaseServiceException.translateAndPropagateIfPossible(ex);
+    throw new BigQueryException(UNKNOWN_CODE, ex.getMessage());
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java
new file mode 100644
index 000000000000..90e7bbccd483
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.gcloud.ServiceFactory;
+
+/**
+ * An interface for BigQuery factories.
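+ *
+ * <p>A minimal sketch of obtaining a service instance through the default factory (assumes
+ * credentials and a project id can be inferred from the environment):
+ * <pre> {@code
+ * BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+ * }</pre>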
+ */
+public interface BigQueryFactory extends ServiceFactory<BigQuery, BigQueryOptions> {
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java
new file mode 100644
index 000000000000..e521228d73bb
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java
@@ -0,0 +1,611 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.gcloud.RetryHelper.runWithRetries;
+
+import com.google.api.services.bigquery.model.Dataset;
+import com.google.api.services.bigquery.model.GetQueryResultsResponse;
+import com.google.api.services.bigquery.model.Job;
+import com.google.api.services.bigquery.model.Table;
+import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
+import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows;
+import com.google.api.services.bigquery.model.TableRow;
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.gcloud.BaseService;
+import com.google.gcloud.Page;
+import com.google.gcloud.PageImpl;
+import com.google.gcloud.PageImpl.NextPageFetcher;
+import com.google.gcloud.RetryHelper;
+import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert;
+import com.google.gcloud.spi.BigQueryRpc;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+final class BigQueryImpl extends BaseService<BigQueryOptions> implements BigQuery {
+
+  private static class DatasetPageFetcher implements NextPageFetcher<DatasetInfo> {
+
+    private static final long serialVersionUID = -3057564042439021278L;
+    private final Map<BigQueryRpc.Option, ?> requestOptions;
+    private final BigQueryOptions serviceOptions;
+
+    DatasetPageFetcher(BigQueryOptions serviceOptions, String cursor,
+        Map<BigQueryRpc.Option, ?> optionMap) {
+      this.requestOptions =
+          PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
+      this.serviceOptions = serviceOptions;
+    }
+
+    @Override
+    public Page<DatasetInfo> nextPage() {
+      return listDatasets(serviceOptions, requestOptions);
+    }
+  }
+
+  private static class TablePageFetcher implements NextPageFetcher<BaseTableInfo> {
+
+    private static final long serialVersionUID = 8611248840504201187L;
+    private final Map<BigQueryRpc.Option, ?> requestOptions;
+    private final BigQueryOptions serviceOptions;
+    private final String dataset;
+
+    TablePageFetcher(String dataset, BigQueryOptions serviceOptions, String cursor,
+        Map<BigQueryRpc.Option, ?> optionMap) {
+      this.requestOptions =
+          PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
+      this.serviceOptions = serviceOptions;
+      this.dataset = dataset;
+    }
+
+    @Override
+    public Page<BaseTableInfo> nextPage() {
+      return
listTables(dataset, serviceOptions, requestOptions); + } + } + + private static class JobPageFetcher implements NextPageFetcher { + + private static final long serialVersionUID = 8536533282558245472L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + + JobPageFetcher(BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + } + + @Override + public Page nextPage() { + return listJobs(serviceOptions, requestOptions); + } + } + + private static class TableDataPageFetcher implements NextPageFetcher> { + + private static final long serialVersionUID = -8501991114794410114L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + private final TableId table; + + TableDataPageFetcher(TableId table, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + this.table = table; + } + + @Override + public Page> nextPage() { + return listTableData(table, serviceOptions, requestOptions); + } + } + + private static class QueryResultsPageFetcherImpl + implements NextPageFetcher>, QueryResult.QueryResultsPageFetcher { + + private static final long serialVersionUID = -9198905840550459803L; + private final Map requestOptions; + private final BigQueryOptions serviceOptions; + private final JobId job; + + QueryResultsPageFetcherImpl(JobId job, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + this.job = job; + } + + @Override + public QueryResult nextPage() { + return getQueryResults(job, serviceOptions, requestOptions).result(); + } + } + + private final BigQueryRpc bigQueryRpc; + + BigQueryImpl(BigQueryOptions options) { + super(options); + bigQueryRpc = options.rpc(); + } + + @Override + public DatasetInfo create(DatasetInfo dataset, DatasetOption... options) + throws BigQueryException { + final Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return DatasetInfo.fromPb(runWithRetries(new Callable() { + @Override + public Dataset call() { + return bigQueryRpc.create(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public T create(T table, TableOption... options) + throws BigQueryException { + final Table tablePb = table.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return BaseTableInfo.fromPb(runWithRetries(new Callable() { + @Override + public Table call() { + return bigQueryRpc.create(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public JobInfo create(JobInfo job, JobOption... 
options) throws BigQueryException { + final Job jobPb = job.setProjectId(options().projectId()).toPb(); + final Map optionsMap = optionMap(options); + try { + return JobInfo.fromPb(runWithRetries(new Callable() { + @Override + public Job call() { + return bigQueryRpc.create(jobPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public DatasetInfo getDataset(String datasetId, DatasetOption... options) + throws BigQueryException { + return getDataset(DatasetId.of(datasetId), options); + } + + @Override + public DatasetInfo getDataset(final DatasetId datasetId, DatasetOption... options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + Dataset answer = runWithRetries(new Callable() { + @Override + public Dataset call() { + return bigQueryRpc.getDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : DatasetInfo.fromPb(answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listDatasets(DatasetListOption... options) throws BigQueryException { + return listDatasets(options(), optionMap(options)); + } + + private static Page listDatasets(final BigQueryOptions serviceOptions, + final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listDatasets(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + return new PageImpl<>(new DatasetPageFetcher(serviceOptions, cursor, optionsMap), cursor, + Iterables.transform(result.y(), DatasetInfo.FROM_PB_FUNCTION)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException { + return delete(DatasetId.of(datasetId), options); + } + + @Override + public boolean delete(final DatasetId datasetId, DatasetDeleteOption... options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.deleteDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public boolean delete(String datasetId, String tableId) throws BigQueryException { + return delete(TableId.of(datasetId, tableId)); + } + + @Override + public boolean delete(final TableId tableId) throws BigQueryException { + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.deleteTable(tableId.dataset(), tableId.table()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public DatasetInfo update(DatasetInfo dataset, DatasetOption... 
+ options)
+      throws BigQueryException {
+    final Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb();
+    final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
+    try {
+      return DatasetInfo.fromPb(runWithRetries(new Callable<Dataset>() {
+        @Override
+        public Dataset call() {
+          return bigQueryRpc.patch(datasetPb, optionsMap);
+        }
+      }, options().retryParams(), EXCEPTION_HANDLER));
+    } catch (RetryHelper.RetryHelperException e) {
+      throw BigQueryException.translateAndThrow(e);
+    }
+  }
+
+  @Override
+  public <T extends BaseTableInfo> T update(T table, TableOption... options)
+      throws BigQueryException {
+    final Table tablePb = table.setProjectId(options().projectId()).toPb();
+    final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
+    try {
+      return BaseTableInfo.fromPb(runWithRetries(new Callable<Table>() {
+        @Override
+        public Table call() {
+          return bigQueryRpc.patch(tablePb, optionsMap);
+        }
+      }, options().retryParams(), EXCEPTION_HANDLER));
+    } catch (RetryHelper.RetryHelperException e) {
+      throw BigQueryException.translateAndThrow(e);
+    }
+  }
+
+  @Override
+  public <T extends BaseTableInfo> T getTable(final String datasetId, final String tableId,
+      TableOption... options) throws BigQueryException {
+    return getTable(TableId.of(datasetId, tableId), options);
+  }
+
+  @Override
+  public <T extends BaseTableInfo> T getTable(final TableId tableId, TableOption... options)
+      throws BigQueryException {
+    final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
+    try {
+      Table answer = runWithRetries(new Callable<Table>
() { + @Override + public Table call() { + return bigQueryRpc.getTable(tableId.dataset(), tableId.table(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : BaseTableInfo.fromPb(answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listTables(String datasetId, TableListOption... options) + throws BigQueryException { + return listTables(datasetId, options(), optionMap(options)); + } + + @Override + public Page listTables(DatasetId datasetId, TableListOption... options) + throws BigQueryException { + return listTables(datasetId.dataset(), options(), optionMap(options)); + } + + private static Page listTables(final String datasetId, final BigQueryOptions + serviceOptions, final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listTables(datasetId, optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable tables = Iterables.transform(result.y(), + BaseTableInfo.FROM_PB_FUNCTION); + return new PageImpl<>(new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), + cursor, tables); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException { + final TableId tableId = request.table(); + final TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest(); + requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues()); + requestPb.setSkipInvalidRows(request.skipInvalidRows()); + requestPb.setTemplateSuffix(request.templateSuffix()); + List rowsPb = Lists.transform(request.rows(), new Function() { + @Override + public Rows apply(RowToInsert rowToInsert) { + return new Rows().setInsertId(rowToInsert.id()).setJson(rowToInsert.content()); + } + }); + requestPb.setRows(rowsPb); + return InsertAllResponse.fromPb( + bigQueryRpc.insertAll(tableId.dataset(), tableId.table(), requestPb)); + } + + @Override + public Page> listTableData(String datasetId, String tableId, + TableDataListOption... options) throws BigQueryException { + return listTableData(TableId.of(datasetId, tableId), options(), optionMap(options)); + } + + @Override + public Page> listTableData(TableId tableId, TableDataListOption... options) + throws BigQueryException { + return listTableData(tableId, options(), optionMap(options)); + } + + private static Page> listTableData(final TableId tableId, + final BigQueryOptions serviceOptions, final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc() + .listTableData(tableId.dataset(), tableId.table(), optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + return new PageImpl<>(new TableDataPageFetcher(tableId, serviceOptions, cursor, optionsMap), + cursor, transformTableData(result.y())); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + private static List> transformTableData(Iterable tableDataPb) { + return ImmutableList.copyOf( + Iterables.transform(tableDataPb != null ? 
tableDataPb : ImmutableList.of(), + new Function>() { + @Override + public List apply(TableRow rowPb) { + return Lists.transform(rowPb.getF(), FieldValue.FROM_PB_FUNCTION); + } + })); + } + + @Override + public JobInfo getJob(String jobId, JobOption... options) throws BigQueryException { + return getJob(JobId.of(jobId), options); + } + + @Override + public JobInfo getJob(final JobId jobId, JobOption... options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + Job answer = runWithRetries(new Callable() { + @Override + public Job call() { + return bigQueryRpc.getJob(jobId.job(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : JobInfo.fromPb(answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listJobs(JobListOption... options) throws BigQueryException { + return listJobs(options(), optionMap(options)); + } + + private static Page listJobs(final BigQueryOptions serviceOptions, + final Map optionsMap) { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listJobs(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable jobs = Iterables.transform(result.y(), JobInfo.FROM_PB_FUNCTION); + return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + } + + @Override + public boolean cancel(String jobId) throws BigQueryException { + return cancel(JobId.of(jobId)); + } + + @Override + public boolean cancel(final JobId jobId) throws BigQueryException { + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.cancel(jobId.job()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public QueryResponse query(final QueryRequest request) throws BigQueryException { + try { + com.google.api.services.bigquery.model.QueryResponse results = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.QueryResponse call() { + return bigQueryRpc.query(request.setProjectId(options().projectId()).toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + QueryResponse.Builder builder = QueryResponse.builder(); + JobId completeJobId = JobId.fromPb(results.getJobReference()); + builder.jobId(completeJobId); + builder.jobCompleted(results.getJobComplete()); + List rowsPb = results.getRows(); + if (results.getJobComplete()) { + builder.jobCompleted(true); + QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, + results.getPageToken(), options(), ImmutableMap.of()); + resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); + resultBuilder.cacheHit(results.getCacheHit()); + if (results.getSchema() != null) { + resultBuilder.schema(Schema.fromPb(results.getSchema())); + } + if (results.getTotalRows() != null) { + resultBuilder.totalRows(results.getTotalRows().longValue()); + } + builder.result(resultBuilder.build()); + } + if (results.getErrors() != null) { + builder.executionErrors( + Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION)); + } + return builder.build(); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public 
QueryResponse getQueryResults(JobId job, QueryResultsOption... options) + throws BigQueryException { + Map optionsMap = optionMap(options); + return getQueryResults(job, options(), optionsMap); + } + + private static QueryResponse getQueryResults(final JobId jobId, + final BigQueryOptions serviceOptions, final Map optionsMap) { + try { + GetQueryResultsResponse results = + runWithRetries(new Callable() { + @Override + public GetQueryResultsResponse call() { + return serviceOptions.rpc().getQueryResults(jobId.job(), optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + QueryResponse.Builder builder = QueryResponse.builder(); + JobId completeJobId = JobId.fromPb(results.getJobReference()); + builder.jobId(completeJobId); + builder.etag(results.getEtag()); + builder.jobCompleted(results.getJobComplete()); + List rowsPb = results.getRows(); + if (results.getJobComplete()) { + QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, + results.getPageToken(), serviceOptions, ImmutableMap.of()); + resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); + resultBuilder.cacheHit(results.getCacheHit()); + if (results.getSchema() != null) { + resultBuilder.schema(Schema.fromPb(results.getSchema())); + } + if (results.getTotalRows() != null) { + resultBuilder.totalRows(results.getTotalRows().longValue()); + } + builder.result(resultBuilder.build()); + } + if (results.getErrors() != null) { + builder.executionErrors( + Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION)); + } + return builder.build(); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + private static QueryResult.Builder transformQueryResults(JobId jobId, List rowsPb, + String cursor, BigQueryOptions serviceOptions, Map optionsMap) { + QueryResultsPageFetcherImpl nextPageFetcher = + new QueryResultsPageFetcherImpl(jobId, serviceOptions, cursor, optionsMap); + return QueryResult.builder() + .pageFetcher(nextPageFetcher) + .cursor(cursor) + .results(transformTableData(rowsPb)); + } + + public TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration) { + return new TableDataWriteChannel(options(), + writeChannelConfiguration.setProjectId(options().projectId())); + } + + private Map optionMap(Option... options) { + Map optionMap = Maps.newEnumMap(BigQueryRpc.Option.class); + for (Option option : options) { + Object prev = optionMap.put(option.rpcOption(), option.value()); + checkArgument(prev == null, "Duplicate option %s", option); + } + return optionMap; + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java new file mode 100644 index 000000000000..71d43cfbe565 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java @@ -0,0 +1,114 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.gcloud.ServiceOptions;
+import com.google.gcloud.spi.BigQueryRpc;
+import com.google.gcloud.spi.BigQueryRpcFactory;
+import com.google.gcloud.spi.DefaultBigQueryRpc;
+
+import java.util.Set;
+
+public class BigQueryOptions extends ServiceOptions<BigQuery, BigQueryRpc, BigQueryOptions> {
+
+  private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery";
+  private static final Set<String> SCOPES = ImmutableSet.of(BIGQUERY_SCOPE);
+  private static final long serialVersionUID = -215981591481708043L;
+
+  public static class DefaultBigqueryFactory implements BigQueryFactory {
+
+    private static final BigQueryFactory INSTANCE = new DefaultBigqueryFactory();
+
+    @Override
+    public BigQuery create(BigQueryOptions options) {
+      return new BigQueryImpl(options);
+    }
+  }
+
+  public static class DefaultBigQueryRpcFactory implements BigQueryRpcFactory {
+
+    private static final BigQueryRpcFactory INSTANCE = new DefaultBigQueryRpcFactory();
+
+    @Override
+    public BigQueryRpc create(BigQueryOptions options) {
+      return new DefaultBigQueryRpc(options);
+    }
+  }
+
+  public static class Builder extends
+      ServiceOptions.Builder<BigQuery, BigQueryRpc, BigQueryOptions, Builder> {
+
+    private Builder() {
+    }
+
+    private Builder(BigQueryOptions options) {
+      super(options);
+    }
+
+    @Override
+    public BigQueryOptions build() {
+      return new BigQueryOptions(this);
+    }
+  }
+
+  private BigQueryOptions(Builder builder) {
+    super(BigQueryFactory.class, BigQueryRpcFactory.class, builder);
+  }
+
+  @Override
+  protected BigQueryFactory defaultServiceFactory() {
+    return DefaultBigqueryFactory.INSTANCE;
+  }
+
+  @Override
+  protected BigQueryRpcFactory defaultRpcFactory() {
+    return DefaultBigQueryRpcFactory.INSTANCE;
+  }
+
+  @Override
+  protected Set<String> scopes() {
+    return SCOPES;
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  @Override
+  public int hashCode() {
+    return baseHashCode();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof BigQueryOptions)) {
+      return false;
+    }
+    BigQueryOptions other = (BigQueryOptions) obj;
+    return baseEquals(other);
+  }
+
+  public static BigQueryOptions defaultInstance() {
+    return builder().build();
+  }
+
+  public static Builder builder() {
+    return new Builder();
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java
new file mode 100644
index 000000000000..1da4fdbe3cdd
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobConfiguration.java
@@ -0,0 +1,257 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationTableCopy; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery copy job configuration. A copy job copies an existing table to another new or + * existing table. Copy job configurations have {@link JobConfiguration.Type#COPY} type. + */ +public final class CopyJobConfiguration extends JobConfiguration { + + private static final long serialVersionUID = 1140509641399762967L; + + private final List sourceTables; + private final TableId destinationTable; + private final JobInfo.CreateDisposition createDisposition; + private final JobInfo.WriteDisposition writeDisposition; + + public static final class Builder + extends JobConfiguration.Builder { + + private List sourceTables; + private TableId destinationTable; + private JobInfo.CreateDisposition createDisposition; + private JobInfo.WriteDisposition writeDisposition; + + private Builder() { + super(Type.COPY); + } + + private Builder(CopyJobConfiguration jobConfiguration) { + this(); + this.sourceTables = jobConfiguration.sourceTables; + this.destinationTable = jobConfiguration.destinationTable; + this.createDisposition = jobConfiguration.createDisposition; + this.writeDisposition = jobConfiguration.writeDisposition; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + this(); + JobConfigurationTableCopy copyConfigurationPb = configurationPb.getCopy(); + this.destinationTable = TableId.fromPb(copyConfigurationPb.getDestinationTable()); + if (copyConfigurationPb.getSourceTables() != null) { + this.sourceTables = + Lists.transform(copyConfigurationPb.getSourceTables(), TableId.FROM_PB_FUNCTION); + } else { + this.sourceTables = ImmutableList.of(TableId.fromPb(copyConfigurationPb.getSourceTable())); + } + if (copyConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + JobInfo.CreateDisposition.valueOf(copyConfigurationPb.getCreateDisposition()); + } + if (copyConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = JobInfo.WriteDisposition.valueOf( + copyConfigurationPb.getWriteDisposition()); + } + } + + /** + * Sets the source tables to copy. + */ + public Builder sourceTables(List sourceTables) { + this.sourceTables = sourceTables != null ? ImmutableList.copyOf(sourceTables) : null; + return this; + } + + /** + * Sets the destination table of the copy job. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + /** + * Sets whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + public Builder createDisposition(JobInfo.CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. 
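+     *
+     * <p>A short sketch (assuming {@code JobInfo.WriteDisposition.WRITE_TRUNCATE} fits the use
+     * case):
+     * <pre> {@code
+     * // overwrite the destination table if it already exists
+     * builder.writeDisposition(JobInfo.WriteDisposition.WRITE_TRUNCATE);
+     * }</pre>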
+ * + * @see + * Write Disposition + */ + public Builder writeDisposition(JobInfo.WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + public CopyJobConfiguration build() { + return new CopyJobConfiguration(this); + } + } + + private CopyJobConfiguration(Builder builder) { + super(builder); + this.sourceTables = checkNotNull(builder.sourceTables); + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + } + + /** + * Returns the source tables to copy. + */ + public List sourceTables() { + return sourceTables; + } + + /** + * Returns the destination table to load the data into. + */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + public JobInfo.CreateDisposition createDisposition() { + return this.createDisposition; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public JobInfo.WriteDisposition writeDisposition() { + return writeDisposition; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceTables", sourceTables) + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof CopyJobConfiguration && baseEquals((CopyJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), sourceTables, destinationTable, createDisposition, + writeDisposition); + } + + @Override + CopyJobConfiguration setProjectId(final String projectId) { + Builder builder = toBuilder(); + builder.sourceTables( + Lists.transform(sourceTables(), new Function() { + @Override + public TableId apply(TableId tableId) { + return tableId.setProjectId(projectId); + } + })); + builder.destinationTable(destinationTable().setProjectId(projectId)); + return builder.build(); + } + + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationTableCopy configurationPb = new JobConfigurationTableCopy(); + configurationPb.setDestinationTable(destinationTable.toPb()); + if (sourceTables.size() == 1) { + configurationPb.setSourceTable(sourceTables.get(0).toPb()); + } else { + configurationPb.setSourceTables(Lists.transform(sourceTables, TableId.TO_PB_FUNCTION)); + } + if (createDisposition != null) { + configurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + configurationPb.setWriteDisposition(writeDisposition.toString()); + } + return new com.google.api.services.bigquery.model.JobConfiguration().setCopy(configurationPb); + } + + /** + * Creates a builder for a BigQuery Copy Job configuration given destination and source table. + */ + public static Builder builder(TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, ImmutableList.of(checkNotNull(sourceTable))); + } + + /** + * Creates a builder for a BigQuery Copy Job configuration given destination and source tables. 
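+   *
+   * <p>For example, a sketch of copying two tables with compatible schemas into one destination
+   * ({@code table1}, {@code table2} and {@code destinationTable} are assumed {@link TableId}s):
+   * <pre> {@code
+   * CopyJobConfiguration configuration =
+   *     CopyJobConfiguration.builder(destinationTable, ImmutableList.of(table1, table2))
+   *         .build();
+   * }</pre>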
+ */ + public static Builder builder(TableId destinationTable, List sourceTables) { + return new Builder().destinationTable(destinationTable).sourceTables(sourceTables); + } + + /** + * Returns a BigQuery Copy Job configuration for the given destination and source table. + */ + public static CopyJobConfiguration of(TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, sourceTable).build(); + } + + /** + * Returns a BigQuery Copy Job configuration for the given destination and source tables. + */ + public static CopyJobConfiguration of(TableId destinationTable, List sourceTables) { + return builder(destinationTable, sourceTables).build(); + } + + @SuppressWarnings("unchecked") + static CopyJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java new file mode 100644 index 000000000000..274ef5678a8a --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java @@ -0,0 +1,271 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; + +import java.nio.charset.Charset; +import java.util.Objects; + +/** + * Google BigQuery options for CSV format. This class wraps some properties of CSV files used by + * BigQuery to parse external data. + */ +public class CsvOptions extends FormatOptions { + + private static final long serialVersionUID = 2193570529308612708L; + + private final Boolean allowJaggedRows; + private final Boolean allowQuotedNewLines; + private final String encoding; + private final String fieldDelimiter; + private final String quote; + private final Integer skipLeadingRows; + + public static final class Builder { + + private Boolean allowJaggedRows; + private Boolean allowQuotedNewLines; + private String encoding; + private String fieldDelimiter; + private String quote; + private Integer skipLeadingRows; + + private Builder() {} + + /** + * Set whether BigQuery should accept rows that are missing trailing optional columns. If + * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false}, + * records with missing trailing columns are treated as bad records, and if there are too many + * bad records, an invalid error is returned in the job result. By default, rows with missing + * trailing columns are considered bad records. + */ + public Builder allowJaggedRows(Boolean allowJaggedRows) { + this.allowJaggedRows = allowJaggedRows; + return this; + } + + /** + * Sets whether BigQuery should allow quoted data sections that contain newline characters in a + * CSV file. By default quoted newline are not allowed. 
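+     *
+     * <p>For instance, a minimal sketch for CSV data that contains quoted embedded newlines:
+     * <pre> {@code
+     * CsvOptions csvOptions = CsvOptions.builder().allowQuotedNewLines(true).build();
+     * }</pre>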
+ */ + public Builder allowQuotedNewLines(Boolean allowQuotedNewLines) { + this.allowQuotedNewLines = allowQuotedNewLines; + return this; + } + + /** + * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The + * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote(String)} and {@link #fieldDelimiter(String)}. + */ + public Builder encoding(String encoding) { + this.encoding = encoding; + return this; + } + + /** + * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The + * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote(String)} and {@link #fieldDelimiter(String)}. + */ + public Builder encoding(Charset encoding) { + this.encoding = encoding.name(); + return this; + } + + /** + * Sets the separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 + * encoding, and then uses the first byte of the encoded string to split the data in its raw, + * binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. + * The default value is a comma (','). + */ + public Builder fieldDelimiter(String fieldDelimiter) { + this.fieldDelimiter = fieldDelimiter; + return this; + } + + /** + * Sets the value that is used to quote data sections in a CSV file. BigQuery converts the + * string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split + * the data in its raw, binary state. The default value is a double-quote ('"'). If your data + * does not contain quoted sections, set the property value to an empty string. If your data + * contains quoted newline characters, you must also set {@link #allowQuotedNewLines(Boolean)} + * property to {@code true}. + */ + public Builder quote(String quote) { + this.quote = quote; + return this; + } + + /** + * Sets the number of rows at the top of a CSV file that BigQuery will skip when reading the + * data. The default value is 0. This property is useful if you have header rows in the file + * that should be skipped. + */ + public Builder skipLeadingRows(Integer skipLeadingRows) { + this.skipLeadingRows = skipLeadingRows; + return this; + } + + /** + * Creates a {@code CsvOptions} object. + */ + public CsvOptions build() { + return new CsvOptions(this); + } + } + + private CsvOptions(Builder builder) { + super(FormatOptions.CSV); + this.allowJaggedRows = builder.allowJaggedRows; + this.allowQuotedNewLines = builder.allowQuotedNewLines; + this.encoding = builder.encoding; + this.fieldDelimiter = builder.fieldDelimiter; + this.quote = builder.quote; + this.skipLeadingRows = builder.skipLeadingRows; + } + + /** + * Returns whether BigQuery should accept rows that are missing trailing optional columns. If + * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false}, + * records with missing trailing columns are treated as bad records, and if the number of bad + * records exceeds {@link ExternalDataConfiguration#maxBadRecords()}, an invalid error is returned + * in the job result. + */ + public Boolean allowJaggedRows() { + return allowJaggedRows; + } + + /** + * Returns whether BigQuery should allow quoted data sections that contain newline characters in a + * CSV file. + */ + public Boolean allowQuotedNewLines() { + return allowQuotedNewLines; + } + + /** + * Returns the character encoding of the data. 
The supported values are UTF-8 or ISO-8859-1. If + * not set, UTF-8 is used. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote()} and {@link #fieldDelimiter()}. + */ + public String encoding() { + return encoding; + } + + /** + * Returns the separator for fields in a CSV file. + */ + public String fieldDelimiter() { + return fieldDelimiter; + } + + /** + * Returns the value that is used to quote data sections in a CSV file. + */ + public String quote() { + return quote; + } + + /** + * Returns the number of rows at the top of a CSV file that BigQuery will skip when reading the + * data. + */ + public Integer skipLeadingRows() { + return skipLeadingRows; + } + + /** + * Returns a builder for the {@code CsvOptions} object. + */ + public Builder toBuilder() { + return new Builder() + .allowJaggedRows(allowJaggedRows) + .allowQuotedNewLines(allowQuotedNewLines) + .encoding(encoding) + .fieldDelimiter(fieldDelimiter) + .quote(quote) + .skipLeadingRows(skipLeadingRows); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("type", type()) + .add("allowJaggedRows", allowJaggedRows) + .add("allowQuotedNewLines", allowQuotedNewLines) + .add("encoding", encoding) + .add("fieldDelimiter", fieldDelimiter) + .add("quote", quote) + .add("skipLeadingRows", skipLeadingRows) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(type(), allowJaggedRows, allowQuotedNewLines, encoding, fieldDelimiter, + quote, skipLeadingRows); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof CsvOptions && Objects.equals(toPb(), ((CsvOptions) obj).toPb()); + } + + com.google.api.services.bigquery.model.CsvOptions toPb() { + com.google.api.services.bigquery.model.CsvOptions csvOptions = + new com.google.api.services.bigquery.model.CsvOptions(); + csvOptions.setAllowJaggedRows(allowJaggedRows); + csvOptions.setAllowQuotedNewlines(allowQuotedNewLines); + csvOptions.setEncoding(encoding); + csvOptions.setFieldDelimiter(fieldDelimiter); + csvOptions.setQuote(quote); + csvOptions.setSkipLeadingRows(skipLeadingRows); + return csvOptions; + } + + /** + * Returns a builder for a CsvOptions object. + */ + public static Builder builder() { + return new Builder(); + } + + static CsvOptions fromPb(com.google.api.services.bigquery.model.CsvOptions csvOptions) { + Builder builder = builder(); + if (csvOptions.getAllowJaggedRows() != null) { + builder.allowJaggedRows(csvOptions.getAllowJaggedRows()); + } + if (csvOptions.getAllowQuotedNewlines() != null) { + builder.allowQuotedNewLines(csvOptions.getAllowQuotedNewlines()); + } + if (csvOptions.getEncoding() != null) { + builder.encoding(csvOptions.getEncoding()); + } + if (csvOptions.getFieldDelimiter() != null) { + builder.fieldDelimiter(csvOptions.getFieldDelimiter()); + } + if (csvOptions.getQuote() != null) { + builder.quote(csvOptions.getQuote()); + } + if (csvOptions.getSkipLeadingRows() != null) { + builder.skipLeadingRows(csvOptions.getSkipLeadingRows()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java new file mode 100644 index 000000000000..facf5e659f99 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Dataset.java @@ -0,0 +1,287 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.Function; +import com.google.common.collect.Iterators; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.Serializable; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * A Google BigQuery Dataset. + * + *

Objects of this class are immutable. Operations that modify the dataset like {@link #update} + * return a new object. To get a {@code Dataset} object with the most recent information use + * {@link #reload}. + *

+ */
+public final class Dataset {
+
+  private final BigQuery bigquery;
+  private final DatasetInfo info;
+
+  private static class TablePageFetcher implements PageImpl.NextPageFetcher<Table> {
+
+    private static final long serialVersionUID = 6906197848579250598L;
+
+    private final BigQueryOptions options;
+    private final Page<BaseTableInfo> infoPage;
+
+    TablePageFetcher(BigQueryOptions options, Page<BaseTableInfo> infoPage) {
+      this.options = options;
+      this.infoPage = infoPage;
+    }
+
+    @Override
+    public Page<Table> nextPage() {
+      Page<BaseTableInfo> nextInfoPage = infoPage.nextPage();
+      return new PageImpl<>(new TablePageFetcher(options, nextInfoPage),
+          nextInfoPage.nextPageCursor(), new LazyTableIterable(options, nextInfoPage.values()));
+    }
+  }
+
+  private static class LazyTableIterable implements Iterable<Table>, Serializable {
+
+    private static final long serialVersionUID = 3312744215731674032L;
+
+    private final BigQueryOptions options;
+    private final Iterable<BaseTableInfo> infoIterable;
+    private transient BigQuery bigquery;
+
+    public LazyTableIterable(BigQueryOptions options, Iterable<BaseTableInfo> infoIterable) {
+      this.options = options;
+      this.infoIterable = infoIterable;
+      this.bigquery = options.service();
+    }
+
+    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
+      in.defaultReadObject();
+      this.bigquery = options.service();
+    }
+
+    @Override
+    public Iterator<Table>
iterator() { + return Iterators.transform(infoIterable.iterator(), new Function() { + @Override + public Table apply(BaseTableInfo tableInfo) { + return new Table(bigquery, tableInfo); + } + }); + } + + @Override + public int hashCode() { + return Objects.hash(options, infoIterable); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof LazyTableIterable)) { + return false; + } + LazyTableIterable other = (LazyTableIterable) obj; + return Objects.equals(options, other.options) + && Objects.equals(infoIterable, other.infoIterable); + } + } + + /** + * Constructs a {@code Dataset} object for the provided {@code DatasetInfo}. The BigQuery service + * is used to issue requests. + * + * @param bigquery the BigQuery service used for issuing requests + * @param info dataset's info + */ + public Dataset(BigQuery bigquery, DatasetInfo info) { + this.bigquery = checkNotNull(bigquery); + this.info = checkNotNull(info); + } + + /** + * Creates a {@code Dataset} object for the provided dataset's user-defined id. Performs an RPC + * call to get the latest dataset information. + * + * @param bigquery the BigQuery service used for issuing requests + * @param dataset dataset's user-defined id + * @param options dataset options + * @return the {@code Dataset} object or {@code null} if not found + * @throws BigQueryException upon failure + */ + public static Dataset get(BigQuery bigquery, String dataset, BigQuery.DatasetOption... options) { + DatasetInfo info = bigquery.getDataset(dataset, options); + return info != null ? new Dataset(bigquery, info) : null; + } + + /** + * Returns the dataset's information. + */ + public DatasetInfo info() { + return info; + } + + /** + * Checks if this dataset exists. + * + * @return {@code true} if this dataset exists, {@code false} otherwise + * @throws BigQueryException upon failure + */ + public boolean exists() { + return bigquery.getDataset(info.datasetId(), BigQuery.DatasetOption.fields()) != null; + } + + /** + * Fetches current dataset's latest information. Returns {@code null} if the dataset does not + * exist. + * + * @param options dataset options + * @return a {@code Dataset} object with latest information or {@code null} if not found + * @throws BigQueryException upon failure + */ + public Dataset reload(BigQuery.DatasetOption... options) { + return Dataset.get(bigquery, info.datasetId().dataset(), options); + } + + /** + * Updates the dataset's information. Dataset's user-defined id cannot be changed. A new + * {@code Dataset} object is returned. + * + * @param datasetInfo new dataset's information. User-defined id must match the one of the current + * dataset + * @param options dataset options + * @return a {@code Dataset} object with updated information + * @throws BigQueryException upon failure + */ + public Dataset update(DatasetInfo datasetInfo, BigQuery.DatasetOption... options) { + checkArgument(Objects.equals(datasetInfo.datasetId().dataset(), + info.datasetId().dataset()), "Dataset's user-defined ids must match"); + return new Dataset(bigquery, bigquery.update(datasetInfo, options)); + } + + /** + * Deletes this dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + public boolean delete() { + return bigquery.delete(info.datasetId()); + } + + /** + * Returns the paginated list of tables in this dataset. + * + * @param options options for listing tables + * @throws BigQueryException upon failure + */ + public Page
list(BigQuery.TableListOption... options) { + Page infoPage = bigquery.listTables(info.datasetId(), options); + BigQueryOptions bigqueryOptions = bigquery.options(); + return new PageImpl<>(new TablePageFetcher(bigqueryOptions, infoPage), + infoPage.nextPageCursor(), new LazyTableIterable(bigqueryOptions, infoPage.values())); + } + + /** + * Returns the requested table in this dataset or {@code null} if not found. + * + * @param table user-defined id of the requested table + * @param options table options + * @throws BigQueryException upon failure + */ + public Table get(String table, BigQuery.TableOption... options) { + BaseTableInfo tableInfo = + bigquery.getTable(TableId.of(info.datasetId().dataset(), table), options); + return tableInfo != null ? new Table(bigquery, tableInfo) : null; + } + + /** + * Creates a new simple table in this dataset. + * + * @param table the table's user-defined id + * @param schema the table's schema + * @param options options for table creation + * @return a {@code Table} object for the created table + * @throws BigQueryException upon failure + */ + public Table create(String table, Schema schema, BigQuery.TableOption... options) { + BaseTableInfo tableInfo = TableInfo.of(TableId.of(info.datasetId().dataset(), table), schema); + return new Table(bigquery, bigquery.create(tableInfo, options)); + } + + /** + * Creates a new view table in this dataset. + * + * @param table the table's user-defined id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + * @param options options for table creation + * @return a {@code Table} object for the created table + * @throws BigQueryException upon failure + */ + public Table create(String table, String query, List functions, + BigQuery.TableOption... options) { + BaseTableInfo tableInfo = + ViewInfo.of(TableId.of(info.datasetId().dataset(), table), query, functions); + return new Table(bigquery, bigquery.create(tableInfo, options)); + } + + /** + * Creates a new view table in this dataset. + * + * @param table the table's user-defined id + * @param query the query used to generate the table + * @param options options for table creation + * @return a {@code Table} object for the created table + * @throws BigQueryException upon failure + */ + public Table create(String table, String query, BigQuery.TableOption... options) { + BaseTableInfo tableInfo = ViewInfo.of(TableId.of(info.datasetId().dataset(), table), query); + return new Table(bigquery, bigquery.create(tableInfo, options)); + } + + /** + * Creates a new external table in this dataset. + * + * @param table the table's user-defined id + * @param configuration data format, location and other properties of an external table + * @param options options for table creation + * @return a {@code Table} object for the created table + * @throws BigQueryException upon failure + */ + public Table create(String table, ExternalDataConfiguration configuration, + BigQuery.TableOption... options) { + BaseTableInfo tableInfo = + ExternalTableInfo.of(TableId.of(info.datasetId().dataset(), table), configuration); + return new Table(bigquery, bigquery.create(tableInfo, options)); + } + + /** + * Returns the dataset's {@code BigQuery} object used to issue requests. 
+ */ + public BigQuery bigquery() { + return bigquery; + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java new file mode 100644 index 000000000000..006c089f8d63 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java @@ -0,0 +1,97 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.DatasetReference; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Dataset identity. + */ +public class DatasetId implements Serializable { + + private static final long serialVersionUID = -6186254820908152300L; + + private final String project; + private final String dataset; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns dataset's user-defined id. + */ + public String dataset() { + return dataset; + } + + private DatasetId(String project, String dataset) { + this.project = project; + this.dataset = dataset; + } + + /** + * Creates a dataset identity given project's and dataset's user-defined ids. + */ + public static DatasetId of(String project, String dataset) { + return new DatasetId(checkNotNull(project), checkNotNull(dataset)); + } + + /** + * Creates a dataset identity given only its user-defined id. + */ + public static DatasetId of(String dataset) { + return new DatasetId(null, checkNotNull(dataset)); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof DatasetId && Objects.equals(toPb(), ((DatasetId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(project, dataset); + } + + @Override + public String toString() { + return toPb().toString(); + } + + DatasetId setProjectId(String projectId) { + return project() != null ? this : DatasetId.of(projectId, dataset()); + } + + DatasetReference toPb() { + return new DatasetReference().setProjectId(project).setDatasetId(dataset); + } + + static DatasetId fromPb(DatasetReference datasetRef) { + return new DatasetId( + datasetRef.getProjectId(), + datasetRef.getDatasetId()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java new file mode 100644 index 000000000000..c6330308c8ce --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java @@ -0,0 +1,444 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.TableReference; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Dataset information. A dataset is a grouping mechanism that holds zero or more + * tables. Datasets are the lowest level unit of access control; you cannot control access at the + * table level. + * + * @see + * Managing Jobs, Datasets, and Projects + */ +public final class DatasetInfo implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public DatasetInfo apply(Dataset pb) { + return DatasetInfo.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public Dataset apply(DatasetInfo datasetInfo) { + return datasetInfo.toPb(); + } + }; + + private static final long serialVersionUID = -6615133444520365839L; + + private final DatasetId datasetId; + private final List acl; + private final Long creationTime; + private final Long defaultTableLifetime; + private final String description; + private final String etag; + private final String friendlyName; + private final String id; + private final Long lastModified; + private final String location; + private final String selfLink; + + public static final class Builder { + + private DatasetId datasetId; + private List acl; + private Long creationTime; + private Long defaultTableLifetime; + private String description; + private String etag; + private String friendlyName; + private String id; + private Long lastModified; + private String location; + private String selfLink; + + private Builder() {} + + private Builder(DatasetInfo datasetInfo) { + this.datasetId = datasetInfo.datasetId; + this.acl = datasetInfo.acl; + this.creationTime = datasetInfo.creationTime; + this.defaultTableLifetime = datasetInfo.defaultTableLifetime; + this.description = datasetInfo.description; + this.etag = datasetInfo.etag; + this.friendlyName = datasetInfo.friendlyName; + this.id = datasetInfo.id; + this.lastModified = datasetInfo.lastModified; + this.location = datasetInfo.location; + this.selfLink = datasetInfo.selfLink; + } + + /** + * Sets the dataset identity. + */ + public Builder datasetId(DatasetId datasetId) { + this.datasetId = checkNotNull(datasetId); + return this; + } + + /** + * Sets the dataset's access control configuration. + * + * @see Access Control + */ + public Builder acl(List acl) { + this.acl = acl != null ? 
ImmutableList.copyOf(acl) : null; + return this; + } + + Builder creationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + /** + * Sets the default lifetime of all tables in the dataset, in milliseconds. The minimum value is + * 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the + * dataset will have an expirationTime property set to the creation time plus the value in this + * property, and changing the value will only affect new tables, not existing ones. When the + * expirationTime for a given table is reached, that table will be deleted automatically. If a + * table's expirationTime is modified or removed before the table expires, or if you provide an + * explicit expirationTime when creating a table, that value takes precedence over the default + * expiration time indicated by this property. This property is experimental and might be + * subject to change or removed. + */ + public Builder defaultTableLifetime(Long defaultTableLifetime) { + this.defaultTableLifetime = + firstNonNull(defaultTableLifetime, Data.nullOf(Long.class)); + return this; + } + + /** + * Sets a user-friendly description for the dataset. + */ + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + /** + * Sets a user-friendly name for the dataset. + */ + public Builder friendlyName(String friendlyName) { + this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); + return this; + } + + Builder id(String id) { + this.id = id; + return this; + } + + Builder lastModified(Long lastModified) { + this.lastModified = lastModified; + return this; + } + + /** + * Sets the geographic location where the dataset should reside. This property is experimental + * and might be subject to change or removed. + * + * @see Dataset + * Location + */ + public Builder location(String location) { + this.location = firstNonNull(location, Data.nullOf(String.class)); + return this; + } + + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + /** + * Creates a {@code DatasetInfo} object. + */ + public DatasetInfo build() { + return new DatasetInfo(this); + } + } + + private DatasetInfo(Builder builder) { + datasetId = checkNotNull(builder.datasetId); + acl = builder.acl; + creationTime = builder.creationTime; + defaultTableLifetime = builder.defaultTableLifetime; + description = builder.description; + etag = builder.etag; + friendlyName = builder.friendlyName; + id = builder.id; + lastModified = builder.lastModified; + location = builder.location; + selfLink = builder.selfLink; + } + + /** + * Returns the dataset identity. + */ + public DatasetId datasetId() { + return datasetId; + } + + /** + * Returns the dataset's access control configuration. + * + * @see Access Control + */ + public List acl() { + return acl; + } + + /** + * Returns the time when this dataset was created, in milliseconds since the epoch. + */ + public Long creationTime() { + return creationTime; + } + + /** + * Returns the default lifetime of all tables in the dataset, in milliseconds. Once this property + * is set, all newly-created tables in the dataset will have an expirationTime property set to the + * creation time plus the value in this property, and changing the value will only affect new + * tables, not existing ones. 
When the expirationTime for a given table is reached, that table + * will be deleted automatically. If a table's expirationTime is modified or removed before the + * table expires, or if you provide an explicit expirationTime when creating a table, that value + * takes precedence over the default expiration time indicated by this property. + */ + public Long defaultTableLifetime() { + return defaultTableLifetime; + } + + /** + * Returns a user-friendly description for the dataset. + */ + public String description() { + return description; + } + + /** + * Returns the hash of the dataset resource. + */ + public String etag() { + return etag; + } + + /** + * Returns a user-friendly name for the dataset. + */ + public String friendlyName() { + return friendlyName; + } + + /** + * Returns an opaque id for the dataset. + */ + public String id() { + return id; + } + + /** + * Returns the time when this dataset or any of its tables was last modified, in milliseconds + * since the epoch. + */ + public Long lastModified() { + return lastModified; + } + + /** + * Returns the geographic location where the dataset should reside. + * + * @see + * Dataset Location + */ + public String location() { + return location; + } + + /** + * Returns an URL that can be used to access the resource again. The returned URL can be used for + * get or update requests. + */ + public String selfLink() { + return selfLink; + } + + /** + * Returns a builder for the {@code DatasetInfo} object. + */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("datasetId", datasetId) + .add("creationTime", creationTime) + .add("defaultTableLifetime", defaultTableLifetime) + .add("description", description) + .add("etag", etag) + .add("friendlyName", friendlyName) + .add("id", id) + .add("lastModified", lastModified) + .add("location", location) + .add("selfLink", selfLink) + .add("acl", acl) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(datasetId); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof DatasetInfo && Objects.equals(toPb(), ((DatasetInfo) obj).toPb()); + } + + DatasetInfo setProjectId(String projectId) { + Builder builder = toBuilder(); + builder.datasetId(datasetId().setProjectId(projectId)); + if (acl() != null) { + List acls = Lists.newArrayListWithCapacity(acl().size()); + for (Acl acl : acl()) { + if (acl.entity().type() == Acl.Entity.Type.VIEW) { + Dataset.Access accessPb = acl.toPb(); + TableReference viewReferencePb = accessPb.getView(); + if (viewReferencePb.getProjectId() == null) { + viewReferencePb.setProjectId(projectId); + } + acls.add(Acl.of(new Acl.View(TableId.fromPb(viewReferencePb)))); + } else { + acls.add(acl); + } + } + builder.acl(acls); + } + return builder.build(); + } + + Dataset toPb() { + Dataset datasetPb = new Dataset(); + datasetPb.setDatasetReference(datasetId.toPb()); + datasetPb.setCreationTime(creationTime); + datasetPb.setDefaultTableExpirationMs(defaultTableLifetime); + datasetPb.setDescription(description); + datasetPb.setEtag(etag); + datasetPb.setFriendlyName(friendlyName); + datasetPb.setId(id); + datasetPb.setLastModifiedTime(lastModified); + datasetPb.setLocation(location); + datasetPb.setSelfLink(selfLink); + if (acl != null) { + datasetPb.setAccess(Lists.transform(acl, new Function() { + @Override + public Dataset.Access apply(Acl acl) { + return acl.toPb(); + } + })); + } + return datasetPb; + } + + /** + * Returns 
a builder for the DatasetInfo object given its user-defined id.
+   */
+  public static Builder builder(String datasetId) {
+    return new Builder().datasetId(DatasetId.of(datasetId));
+  }
+
+  /**
+   * Returns a builder for the DatasetInfo object given its project and user-defined id.
+   */
+  public static Builder builder(String projectId, String datasetId) {
+    return new Builder().datasetId(DatasetId.of(projectId, datasetId));
+  }
+
+  /**
+   * Returns a builder for the DatasetInfo object given its identity.
+   */
+  public static Builder builder(DatasetId datasetId) {
+    return new Builder().datasetId(datasetId);
+  }
+
+  static DatasetInfo fromPb(Dataset datasetPb) {
+    Builder builder = builder(datasetPb.getDatasetReference().getProjectId(),
+        datasetPb.getDatasetReference().getDatasetId());
+    if (datasetPb.getAccess() != null) {
+      builder.acl(Lists.transform(datasetPb.getAccess(),
+          new Function<Dataset.Access, Acl>() {
+            @Override
+            public Acl apply(Dataset.Access accessPb) {
+              return Acl.fromPb(accessPb);
+            }
+          }));
+    }
+    if (datasetPb.getCreationTime() != null) {
+      builder.creationTime(datasetPb.getCreationTime());
+    }
+    if (datasetPb.getDefaultTableExpirationMs() != null) {
+      builder.defaultTableLifetime(datasetPb.getDefaultTableExpirationMs());
+    }
+    if (datasetPb.getDescription() != null) {
+      builder.description(datasetPb.getDescription());
+    }
+    if (datasetPb.getEtag() != null) {
+      builder.etag(datasetPb.getEtag());
+    }
+    if (datasetPb.getFriendlyName() != null) {
+      builder.friendlyName(datasetPb.getFriendlyName());
+    }
+    if (datasetPb.getId() != null) {
+      builder.id(datasetPb.getId());
+    }
+    if (datasetPb.getLastModifiedTime() != null) {
+      builder.lastModified(datasetPb.getLastModifiedTime());
+    }
+    if (datasetPb.getLocation() != null) {
+      builder.location(datasetPb.getLocation());
+    }
+    if (datasetPb.getSelfLink() != null) {
+      builder.selfLink(datasetPb.getSelfLink());
+    }
+    return builder.build();
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalDataConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalDataConfiguration.java
new file mode 100644
index 000000000000..4344aeba186b
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalDataConfiguration.java
@@ -0,0 +1,397 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+import com.google.common.collect.ImmutableList;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Google BigQuery configuration for tables backed by external data. Objects of this class describe
+ * the data format, location, and other properties of a table stored outside of BigQuery.
+ * By defining these properties, the data source can then be queried as if it were a standard + * BigQuery table. Support for external tables is experimental and might be subject to changes or + * removed. + * + * @see Federated Data Sources + * + */ +public class ExternalDataConfiguration implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public ExternalDataConfiguration apply( + com.google.api.services.bigquery.model.ExternalDataConfiguration configurationPb) { + return ExternalDataConfiguration.fromPb(configurationPb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public com.google.api.services.bigquery.model.ExternalDataConfiguration apply( + ExternalDataConfiguration configuration) { + return configuration.toPb(); + } + }; + + private static final long serialVersionUID = -8004288831035566549L; + + private final List sourceUris; + private final Schema schema; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Boolean ignoreUnknownValues; + private final String compression; + + public static final class Builder { + + private List sourceUris; + private Schema schema; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Boolean ignoreUnknownValues; + private String compression; + + private Builder() {} + + /** + * Sets the fully-qualified URIs that point to your data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character that must come after the + * bucket's name. Size limits related to load jobs apply to external data sources, plus an + * additional limit of 10 GB maximum size across all URIs. + * + * @see Quota + */ + public Builder sourceUris(List sourceUris) { + this.sourceUris = ImmutableList.copyOf(checkNotNull(sourceUris)); + return this; + } + + /** + * Sets the schema for the external data. + */ + public Builder schema(Schema schema) { + this.schema = checkNotNull(schema); + return this; + } + + /** + * Sets the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. + * + * + * Source Format + */ + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = checkNotNull(formatOptions); + return this; + } + + /** + * Sets the maximum number of bad records that BigQuery can ignore when reading data. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * The default value is 0, which requires that all records are valid. + */ + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + /** + * Sets whether BigQuery should allow extra values that are not represented in the table schema. + * If true, the extra values are ignored. If false, records with extra columns are treated as + * bad records, and if there are too many bad records, an invalid error is returned in the job + * result. The default value is false. The value set with {@link #formatOptions(FormatOptions)} + * property determines what BigQuery treats as an extra value. + * + * @see + * Ignore Unknown Values + */ + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * Sets compression type of the data source. By default no compression is assumed. 
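+     *
+     * <p>For example, a GZIP compressed CSV source might be configured with the following sketch
+     * ({@code schema} is an assumed, previously-built {@link Schema}; the URI is illustrative):
+     * <pre> {@code
+     * ExternalDataConfiguration configuration =
+     *     ExternalDataConfiguration.builder("gs://bucket/data.csv.gz", schema, FormatOptions.of("CSV"))
+     *         .compression("GZIP")
+     *         .build();
+     * }</pre>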
+ * + * @see + * Compression + */ + public Builder compression(String compression) { + this.compression = compression; + return this; + } + + /** + * Creates an {@code ExternalDataConfiguration} object. + */ + public ExternalDataConfiguration build() { + return new ExternalDataConfiguration(this); + } + } + + ExternalDataConfiguration(Builder builder) { + this.compression = builder.compression; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.formatOptions = builder.formatOptions; + this.sourceUris = builder.sourceUris; + } + + /** + * Returns the compression type of the data source. + * + * @see + * Compression + */ + public String compression() { + return compression; + } + + /** + * Returns whether BigQuery should allow extra values that are not represented in the table + * schema. If true, the extra values are ignored. If false, records with extra columns are treated + * as bad records, and if there are too many bad records, an invalid error is returned in the job + * result. The default value is false. The value of {@link #formatOptions()} determines what + * BigQuery treats as an extra value. + * + * @see + * Ignore Unknown Values + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns the maximum number of bad records that BigQuery can ignore when reading data. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + */ + public Integer maxBadRecords() { + return maxBadRecords; + } + + /** + * Returns the schema for the external data. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can + * contain one '*' wildcard character that must come after the bucket's name. Size limits + * related to load jobs apply to external data sources, plus an additional limit of 10 GB + * maximum size across all URIs. + * + * @see Quota + */ + public List sourceUris() { + return sourceUris; + } + + /** + * Returns the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. + */ + @SuppressWarnings("unchecked") + public F formatOptions() { + return (F) formatOptions; + } + + /** + * Returns a builder for the {@code ExternalDataConfiguration} object. 
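+   *
+   * <p>For example, a sketch of deriving a slightly more tolerant copy of an existing
+   * configuration:
+   * <pre> {@code
+   * ExternalDataConfiguration relaxed = configuration.toBuilder().maxBadRecords(10).build();
+   * }</pre>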
+ */ + public Builder toBuilder() { + return new Builder() + .compression(compression) + .ignoreUnknownValues(ignoreUnknownValues) + .maxBadRecords(maxBadRecords) + .schema(schema) + .formatOptions(formatOptions) + .sourceUris(sourceUris); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("sourceUris", sourceUris) + .add("formatOptions", formatOptions) + .add("schema", schema) + .add("compression", compression) + .add("ignoreUnknownValues", ignoreUnknownValues) + .add("maxBadRecords", maxBadRecords) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(compression, ignoreUnknownValues, maxBadRecords, schema, formatOptions, + sourceUris); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExternalDataConfiguration + && Objects.equals(toPb(), ((ExternalDataConfiguration) obj).toPb()); + } + + com.google.api.services.bigquery.model.ExternalDataConfiguration toPb() { + com.google.api.services.bigquery.model.ExternalDataConfiguration externalConfigurationPb = + new com.google.api.services.bigquery.model.ExternalDataConfiguration(); + if (compression != null) { + externalConfigurationPb.setCompression(compression); + } + if (ignoreUnknownValues != null) { + externalConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + } + if (maxBadRecords != null) { + externalConfigurationPb.setMaxBadRecords(maxBadRecords); + } + if (schema != null) { + externalConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + externalConfigurationPb.setSourceFormat(formatOptions.type()); + } + if (sourceUris != null) { + externalConfigurationPb.setSourceUris(sourceUris); + } + if (formatOptions != null && FormatOptions.CSV.equals(formatOptions.type())) { + externalConfigurationPb.setCsvOptions(((CsvOptions) formatOptions).toPb()); + } + return externalConfigurationPb; + } + + /** + * Creates a builder for an ExternalDataConfiguration object. + * + * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage. + * Each URI can contain one '*' wildcard character that must come after the bucket's name. + * Size limits related to load jobs apply to external data sources, plus an additional limit + * of 10 GB maximum size across all URIs. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return a builder for an ExternalDataConfiguration object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static Builder builder(List sourceUris, Schema schema, FormatOptions format) { + return new Builder().sourceUris(sourceUris).schema(schema).formatOptions(format); + } + + /** + * Creates a builder for an ExternalDataConfiguration object. + * + * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The + * URI can contain one '*' wildcard character that must come after the bucket's name. Size + * limits related to load jobs apply to external data sources. 
+ * @param schema the schema for the external data + * @param format the source format of the external data + * @return a builder for an ExternalDataConfiguration object given source URI, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static Builder builder(String sourceUri, Schema schema, FormatOptions format) { + return new Builder() + .sourceUris(ImmutableList.of(sourceUri)) + .schema(schema) + .formatOptions(format); + } + + /** + * Creates an ExternalDataConfiguration object. + * + * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage. + * Each URI can contain one '*' wildcard character that must come after the bucket's name. + * Size limits related to load jobs apply to external data sources, plus an additional limit + * of 10 GB maximum size across all URIs. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return an ExternalDataConfiguration object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static ExternalDataConfiguration of(List sourceUris, Schema schema, + FormatOptions format) { + return builder(sourceUris, schema, format).build(); + } + + /** + * Creates an ExternalDataConfiguration object. + * + * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The + * URI can contain one '*' wildcard character that must come after the bucket's name. Size + * limits related to load jobs apply to external data sources. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return an ExternalDataConfiguration object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static ExternalDataConfiguration of(String sourceUri, Schema schema, + FormatOptions format) { + return builder(sourceUri, schema, format).build(); + } + + static ExternalDataConfiguration fromPb( + com.google.api.services.bigquery.model.ExternalDataConfiguration externalDataConfiguration) { + Builder builder = new Builder(); + if (externalDataConfiguration.getSourceUris() != null) { + builder.sourceUris(externalDataConfiguration.getSourceUris()); + } + if (externalDataConfiguration.getSchema() != null) { + builder.schema(Schema.fromPb(externalDataConfiguration.getSchema())); + } + if (externalDataConfiguration.getSourceFormat() != null) { + builder.formatOptions(FormatOptions.of(externalDataConfiguration.getSourceFormat())); + } + if (externalDataConfiguration.getCompression() != null) { + builder.compression(externalDataConfiguration.getCompression()); + } + if (externalDataConfiguration.getIgnoreUnknownValues() != null) { + builder.ignoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + builder.formatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions())); + } + if (externalDataConfiguration.getMaxBadRecords() != null) { + builder.maxBadRecords(externalDataConfiguration.getMaxBadRecords()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java new file mode 100644 index 000000000000..80a094425484 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java @@ -0,0 +1,149 @@ +/* + * Copyright 2015 
Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.MoreObjects.ToStringHelper; + +import java.util.Objects; + +/** + * Google BigQuery External Table information. BigQuery's external tables are tables whose data + * reside outside of BigQuery but can be queried as normal BigQuery tables. External tables are + * experimental and might be subject to change or removed. + * + * @see Federated Data Sources + * + */ +public class ExternalTableInfo extends BaseTableInfo { + + private static final long serialVersionUID = -5893406738246214865L; + + private final ExternalDataConfiguration configuration; + + public static final class Builder extends BaseTableInfo.Builder { + + private ExternalDataConfiguration configuration; + + private Builder() {} + + private Builder(ExternalTableInfo tableInfo) { + super(tableInfo); + this.configuration = tableInfo.configuration; + } + + protected Builder(Table tablePb) { + super(tablePb); + if (tablePb.getExternalDataConfiguration() != null) { + this.configuration = + ExternalDataConfiguration.fromPb(tablePb.getExternalDataConfiguration()); + } + } + + /** + * Sets the data format, location and other properties of a table stored outside of BigQuery. + * + * @see Federated Data + * Sources + */ + public Builder configuration(ExternalDataConfiguration configuration) { + this.configuration = checkNotNull(configuration); + return self(); + } + + /** + * Creates a {@code ExternalTableInfo} object. + */ + @Override + public ExternalTableInfo build() { + return new ExternalTableInfo(this); + } + } + + private ExternalTableInfo(Builder builder) { + super(builder); + this.configuration = builder.configuration; + } + + /** + * Returns the data format, location and other properties of a table stored outside of BigQuery. + * This property is experimental and might be subject to change or removed. + * + * @see Federated Data Sources + * + */ + public ExternalDataConfiguration configuration() { + return configuration; + } + + /** + * Returns a builder for the {@code ExternalTableInfo} object. + */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper().add("configuration", configuration); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExternalTableInfo && baseEquals((ExternalTableInfo) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), configuration); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + tablePb.setExternalDataConfiguration(configuration.toPb()); + return tablePb; + } + + /** + * Returns a builder for a BigQuery External Table. 
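+   *
+   * <p>A sketch of defining an external table, assuming {@code configuration} is an
+   * {@link ExternalDataConfiguration} built beforehand and the table ids are illustrative:
+   * <pre> {@code
+   * ExternalTableInfo info =
+   *     ExternalTableInfo.builder(TableId.of("dataset", "table"), configuration).build();
+   * }</pre>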
+   *
+   * @param tableId table id
+   * @param configuration data format, location and other properties of an External Table
+   */
+  public static Builder builder(TableId tableId, ExternalDataConfiguration configuration) {
+    return new Builder().tableId(tableId).type(Type.EXTERNAL).configuration(configuration);
+  }
+
+  /**
+   * Returns a BigQuery External Table.
+   *
+   * @param table table id
+   * @param configuration data format, location and other properties of an External Table
+   */
+  public static ExternalTableInfo of(TableId table, ExternalDataConfiguration configuration) {
+    return builder(table, configuration).build();
+  }
+
+  @SuppressWarnings("unchecked")
+  static ExternalTableInfo fromPb(Table tablePb) {
+    return new Builder(tablePb).build();
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java
new file mode 100644
index 000000000000..d8e57bd17254
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobConfiguration.java
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.JobConfigurationExtract;
+import com.google.common.base.MoreObjects.ToStringHelper;
+import com.google.common.collect.ImmutableList;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Google BigQuery extract job configuration. An extract job exports a BigQuery table to Google
+ * Cloud Storage. The extract destination is provided as URIs that point to objects in Google
+ * Cloud Storage. Extract job configurations have {@link JobConfiguration.Type#EXTRACT} type.
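+ *
+ * <p>For example, a sketch of exporting a table to a single compressed CSV file in Google Cloud
+ * Storage (the table id and destination URI are illustrative assumptions):
+ * <pre> {@code
+ * ExtractJobConfiguration configuration =
+ *     ExtractJobConfiguration.builder(TableId.of("dataset", "table"), "gs://bucket/table.csv.gz")
+ *         .compression("GZIP")
+ *         .build();
+ * }</pre>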
+ */ +public final class ExtractJobConfiguration extends JobConfiguration { + + private static final long serialVersionUID = 4147749733166593761L; + + private final TableId sourceTable; + private final List destinationUris; + private final Boolean printHeader; + private final String fieldDelimiter; + private final String format; + private final String compression; + + public static final class Builder + extends JobConfiguration.Builder { + + private TableId sourceTable; + private List destinationUris; + private Boolean printHeader; + private String fieldDelimiter; + private String format; + private String compression; + + private Builder() { + super(Type.EXTRACT); + } + + private Builder(ExtractJobConfiguration jobInfo) { + this(); + this.sourceTable = jobInfo.sourceTable; + this.destinationUris = jobInfo.destinationUris; + this.printHeader = jobInfo.printHeader; + this.fieldDelimiter = jobInfo.fieldDelimiter; + this.format = jobInfo.format; + this.compression = jobInfo.compression; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + this(); + JobConfigurationExtract extractConfigurationPb = configurationPb.getExtract(); + this.sourceTable = TableId.fromPb(extractConfigurationPb.getSourceTable()); + this.destinationUris = extractConfigurationPb.getDestinationUris(); + this.printHeader = extractConfigurationPb.getPrintHeader(); + this.fieldDelimiter = extractConfigurationPb.getFieldDelimiter(); + this.format = extractConfigurationPb.getDestinationFormat(); + this.compression = extractConfigurationPb.getCompression(); + } + + /** + * Sets the table to export. + */ + public Builder sourceTable(TableId sourceTable) { + this.sourceTable = sourceTable; + return this; + } + + /** + * Sets the list of fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) where the + * extracted table should be written. + */ + public Builder destinationUris(List destinationUris) { + this.destinationUris = destinationUris != null ? ImmutableList.copyOf(destinationUris) : null; + return this; + } + + /** + * Sets whether to print out a header row in the results. By default an header is printed. + */ + public Builder printHeader(Boolean printHeader) { + this.printHeader = printHeader; + return this; + } + + /** + * Sets the delimiter to use between fields in the exported data. By default "," is used. + */ + public Builder fieldDelimiter(String fieldDelimiter) { + this.fieldDelimiter = fieldDelimiter; + return this; + } + + /** + * Sets the exported file format. If not set table is exported in CSV format. + * + * + * Destination Format + */ + public Builder format(String format) { + this.format = format; + return this; + } + + /** + * Sets the compression value to use for exported files. If not set exported files are not + * compressed. + * + * + * Compression + */ + public Builder compression(String compression) { + this.compression = compression; + return this; + } + + public ExtractJobConfiguration build() { + return new ExtractJobConfiguration(this); + } + } + + private ExtractJobConfiguration(Builder builder) { + super(builder); + this.sourceTable = checkNotNull(builder.sourceTable); + this.destinationUris = checkNotNull(builder.destinationUris); + this.printHeader = builder.printHeader; + this.fieldDelimiter = builder.fieldDelimiter; + this.format = builder.format; + this.compression = builder.compression; + } + + /** + * Returns the table to export. 
+ */ + public TableId sourceTable() { + return sourceTable; + } + + /** + * Returns the list of fully-qualified Google Cloud Storage URIs where the extracted table should + * be written. + * + * @see + * Exporting Data Into One or More Files + */ + public List destinationUris() { + return destinationUris; + } + + /** + * Returns whether an header row is printed with the result. + */ + public Boolean printHeader() { + return printHeader; + } + + /** + * Returns the delimiter used between fields in the exported data. + */ + public String fieldDelimiter() { + return fieldDelimiter; + } + + /** + * Returns the exported files format. + */ + public String format() { + return format; + } + + /** + * Returns the compression value of exported files. + */ + public String compression() { + return compression; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceTable", sourceTable) + .add("destinationUris", destinationUris) + .add("format", format) + .add("printHeader", printHeader) + .add("fieldDelimiter", fieldDelimiter) + .add("compression", compression); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExtractJobConfiguration && baseEquals((ExtractJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), sourceTable, destinationUris, printHeader, fieldDelimiter, + format, compression); + } + + @Override + ExtractJobConfiguration setProjectId(String projectId) { + return toBuilder().sourceTable(sourceTable().setProjectId(projectId)).build(); + } + + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationExtract extractConfigurationPb = new JobConfigurationExtract(); + extractConfigurationPb.setDestinationUris(destinationUris); + extractConfigurationPb.setSourceTable(sourceTable.toPb()); + extractConfigurationPb.setPrintHeader(printHeader); + extractConfigurationPb.setFieldDelimiter(fieldDelimiter); + extractConfigurationPb.setDestinationFormat(format); + extractConfigurationPb.setCompression(compression); + return new com.google.api.services.bigquery.model.JobConfiguration() + .setExtract(extractConfigurationPb); + } + + /** + * Creates a builder for a BigQuery Extract Job configuration given source table and destination + * URI. + */ + public static Builder builder(TableId sourceTable, String destinationUri) { + return builder(sourceTable, ImmutableList.of(checkNotNull(destinationUri))); + } + + /** + * Creates a builder for a BigQuery Extract Job configuration given source table and destination + * URIs. + */ + public static Builder builder(TableId sourceTable, List destinationUris) { + return new Builder().sourceTable(sourceTable).destinationUris(destinationUris); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table and destination URI. + */ + public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri) { + return builder(sourceTable, destinationUri).build(); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table and destination URIs. + */ + public static ExtractJobConfiguration of(TableId sourceTable, List destinationUris) { + return builder(sourceTable, destinationUris).build(); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table, format and destination + * URI. 
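+   *
+   * <p>For example (the dataset, table and bucket names are illustrative):
+   * <pre> {@code
+   *   ExtractJobConfiguration configuration = ExtractJobConfiguration.of(
+   *       TableId.of("dataset", "table"), "gs://bucket/export.csv", "CSV");
+   * }</pre>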
+ */ + public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri, + String format) { + return builder(sourceTable, destinationUri).format(format).build(); + } + + /** + * Returns a BigQuery Extract Job configuration for the given source table, format and destination + * URIs. + */ + public static ExtractJobConfiguration of(TableId sourceTable, List destinationUris, + String format) { + return builder(sourceTable, destinationUris).format(format).build(); + } + + @SuppressWarnings("unchecked") + static ExtractJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration confPb) { + return new Builder(confPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java new file mode 100644 index 000000000000..55fae44c5eed --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java @@ -0,0 +1,375 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Table field. A table field has a name, a value, a mode and possibly a + * description. Supported types are: {@link Type#integer()}, {@link Type#bool()}, + * {@link Type#string()}, {@link Type#floatingPoint()}, {@link Type#timestamp()} and + * {@link Type#record(Field...)}. One or more fields form a table's schema. + */ +public class Field implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public Field apply(TableFieldSchema pb) { + return Field.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public TableFieldSchema apply(Field field) { + return field.toPb(); + } + }; + + private static final long serialVersionUID = -8154262932305199256L; + + /** + * Data Types for a BigQuery Table field. This class provides factory methods for all BigQuery + * field types. To instantiate a RECORD value the list of sub-fields must be provided. 
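+   *
+   * <p>For example, a RECORD type with a single integer sub-field might be created as follows
+   * (the field name is illustrative):
+   * <pre> {@code
+   *   Field subField = Field.of("count", Field.Type.integer());
+   *   Field.Type recordType = Field.Type.record(subField);
+   * }</pre>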
+ * + * @see + * Data Types + */ + public static class Type implements Serializable { + + private static final long serialVersionUID = 2841484762609576959L; + + public enum Value { + STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD + } + + private final Value value; + private final List fields; + + private Type(Value value) { + this.value = checkNotNull(value); + this.fields = null; + } + + private Type(Value value, List fields) { + checkArgument(fields.size() > 0, "Record must have at least one field"); + this.value = value; + this.fields = fields; + } + + /** + * Returns the value identifier. + * + * @see + * Data Types + */ + public Value value() { + return value; + } + + /** + * Returns the list of sub-fields if {@link #value()} is set to {@link Value#RECORD}. Returns + * {@code null} otherwise. + */ + public List fields() { + return fields; + } + + /** + * Returns a {@link Value#STRING} field value. + */ + public static Type string() { + return new Type(Value.STRING); + } + + /** + * Returns an {@link Value#INTEGER} field value. + */ + public static Type integer() { + return new Type(Value.INTEGER); + } + + /** + * Returns a {@link Value#FLOAT} field value. + */ + public static Type floatingPoint() { + return new Type(Value.FLOAT); + } + + /** + * Returns a {@link Value#BOOLEAN} field value. + */ + public static Type bool() { + return new Type(Value.BOOLEAN); + } + + /** + * Returns a {@link Value#TIMESTAMP} field value. + */ + public static Type timestamp() { + return new Type(Value.TIMESTAMP); + } + + /** + * Returns a {@link Value#RECORD} field value with associated list of sub-fields. + */ + public static Type record(Field... fields) { + return new Type(Value.RECORD, ImmutableList.copyOf(fields)); + } + + /** + * Returns a {@link Value#RECORD} field value with associated list of sub-fields. + */ + public static Type record(List fields) { + return new Type(Value.RECORD, ImmutableList.copyOf(checkNotNull(fields))); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("value", value) + .add("fields", fields) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(value, fields); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Type)) { + return false; + } + Type other = (Type) obj; + return Objects.equals(value, other.value) + && Objects.equals(fields, other.fields); + } + } + + /** + * Mode for a BigQuery Table field. {@link Mode#NULLABLE} fields can be set to {@code null}, + * {@link Mode#REQUIRED} fields must be provided. {@link Mode#REPEATED} fields can contain more + * than one value. + */ + public enum Mode { + NULLABLE, REQUIRED, REPEATED + } + + private final String name; + private final Type type; + private final String mode; + private final String description; + + public static final class Builder { + + private String name; + private Type type; + private String mode; + private String description; + + private Builder() {} + + private Builder(Field field) { + this.name = field.name; + this.type = field.type; + this.mode = field.mode; + this.description = field.description; + } + + /** + * Sets the field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or + * underscores (_), and must start with a letter or underscore. The maximum length is 128 + * characters. + */ + public Builder name(String name) { + this.name = checkNotNull(name); + return this; + } + + /** + * Sets the value of the field. 
+ * + * @see + * Data Types + */ + public Builder type(Type type) { + this.type = checkNotNull(type); + return this; + } + + /** + * Sets the mode of the field. When not specified {@link Mode#NULLABLE} is used. + */ + public Builder mode(Mode mode) { + this.mode = mode != null ? mode.name() : Data.nullOf(String.class); + return this; + } + + /** + * Sets the field description. The maximum length is 16K characters. + */ + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + /** + * Creates a {@code Field} object. + */ + public Field build() { + return new Field(this); + } + } + + private Field(Builder builder) { + this.name = checkNotNull(builder.name); + this.type = checkNotNull(builder.type); + this.mode = builder.mode; + this.description = builder.description; + } + + /** + * Returns the field name. + */ + public String name() { + return name; + } + + /** + * Returns the field value. + * + * @see + * Data Types + */ + public Type type() { + return type; + } + + /** + * Returns the field mode. By default {@link Mode#NULLABLE} is used. + */ + public Mode mode() { + return mode != null ? Mode.valueOf(mode) : null; + } + + /** + * Returns the field description. + */ + public String description() { + return Data.isNull(description) ? null : description; + } + + /** + * Returns the list of sub-fields if {@link #type()} is a {@link Type.Value#RECORD}. Returns + * {@code null} otherwise. + */ + public List fields() { + return type.fields(); + } + + /** + * Returns a builder for the {@code Field} object. + */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", name) + .add("value", type) + .add("mode", mode) + .add("description", description) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(name, type, mode, description); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Field && Objects.equals(toPb(), ((Field) obj).toPb()); + } + + TableFieldSchema toPb() { + TableFieldSchema fieldSchemaPb = new TableFieldSchema(); + fieldSchemaPb.setName(name); + fieldSchemaPb.setType(type.value().name()); + if (mode != null) { + fieldSchemaPb.setMode(mode); + } + if (description != null) { + fieldSchemaPb.setDescription(description); + } + if (fields() != null) { + List fieldsPb = Lists.transform(fields(), TO_PB_FUNCTION); + fieldSchemaPb.setFields(fieldsPb); + } + return fieldSchemaPb; + } + + /** + * Returns a Field object with given name and value. + */ + public static Field of(String name, Type type) { + return builder(name, type).build(); + } + + /** + * Returns a builder for a Field object with given name and value. 
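+   *
+   * <p>For example, a required string field might be built as follows (the name and description
+   * are illustrative):
+   * <pre> {@code
+   *   Field field = Field.builder("name", Field.Type.string())
+   *       .mode(Field.Mode.REQUIRED)
+   *       .description("The entity name")
+   *       .build();
+   * }</pre>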
+ */ + public static Builder builder(String name, Type type) { + return new Builder().name(name).type(type); + } + + static Field fromPb(TableFieldSchema fieldSchemaPb) { + Builder fieldBuilder = new Builder(); + fieldBuilder.name(fieldSchemaPb.getName()); + Type.Value enumValue = Type.Value.valueOf(fieldSchemaPb.getType()); + if (fieldSchemaPb.getMode() != null) { + fieldBuilder.mode(Mode.valueOf(fieldSchemaPb.getMode())); + } + if (fieldSchemaPb.getDescription() != null) { + fieldBuilder.description(fieldSchemaPb.getDescription()); + } + if (fieldSchemaPb.getFields() != null) { + fieldBuilder.type(Type.record(Lists.transform(fieldSchemaPb.getFields(), FROM_PB_FUNCTION))); + } else { + fieldBuilder.type(new Type(enumValue)); + } + return fieldBuilder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java new file mode 100644 index 000000000000..24c4b28b7613 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java @@ -0,0 +1,266 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkState; + +import com.google.api.client.util.Data; +import com.google.api.client.util.Lists; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google BigQuery Table Field Value class. Objects of this class represent values of a BigQuery + * Table Field. A list of values forms a table row. Tables rows can be gotten as the result of a + * query or when listing table data. + */ +public class FieldValue implements Serializable { + + static final Function FROM_PB_FUNCTION = new Function() { + @Override + public FieldValue apply(Object pb) { + return FieldValue.fromPb(pb); + } + }; + private static final int MICROSECONDS = 1000000; + private static final long serialVersionUID = 469098630191710061L; + + private final Attribute attribute; + private final Object value; + + /** + * The field value's attribute, giving information on the field's content type. + */ + public enum Attribute { + /** + * A primitive field value. A {@code FieldValue} is primitive when the corresponding field has + * type {@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()} or the value is set to {@code null}. + */ + PRIMITIVE, + + /** + * A {@code FieldValue} for a field with {@link Field.Mode#REPEATED} mode. + */ + REPEATED, + + /** + * A {@code FieldValue} for a field of type {@link Field.Type#record(Field...)}. 
+ */ + RECORD + } + + FieldValue(Attribute attribute, Object value) { + this.attribute = attribute; + this.value = value; + } + + /** + * Returns the attribute of this Field Value. + * + * @return {@link Attribute#PRIMITIVE} if the field is a primitive type + * ({@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()}) or is {@code null}. Returns {@link Attribute#REPEATED} if + * the corresponding field has ({@link Field.Mode#REPEATED}) mode. Returns + * {@link Attribute#RECORD} if the corresponding field is a + * {@link Field.Type#record(Field...)} type. + */ + public Attribute attribute() { + return attribute; + } + + /** + * Returns {@code true} if this field's value is {@code null}, {@code false} otherwise. + */ + public boolean isNull() { + return value == null; + } + + /** + * Returns this field's value as an {@link Object}. If {@link #isNull()} is {@code true} this + * method returns {@code null}. + */ + public Object value() { + return value; + } + + /** + * Returns this field's value as a {@link String}. This method should only be used if the + * corresponding field has primitive type ({@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()}). + * + * @throws ClassCastException if the field is not a primitive type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public String stringValue() { + checkNotNull(value); + return (String) value; + } + + /** + * Returns this field's value as a {@code long}. This method should only be used if the + * corresponding field has {@link Field.Type#integer()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Integer} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public long longValue() { + return Long.parseLong(stringValue()); + } + + /** + * Returns this field's value as a {@link Double}. This method should only be used if the + * corresponding field has {@link Field.Type#floatingPoint()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Double} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public double doubleValue() { + return Double.parseDouble(stringValue()); + } + + /** + * Returns this field's value as a {@link Boolean}. This method should only be used if the + * corresponding field has {@link Field.Type#bool()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws IllegalStateException if the field's value could not be converted to {@link Boolean} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public boolean booleanValue() { + String stringValue = stringValue(); + checkState(stringValue.equalsIgnoreCase("true") || stringValue.equalsIgnoreCase("false"), + "Field value is not of boolean type"); + return Boolean.parseBoolean(stringValue); + } + + /** + * Returns this field's value as a {@code long}, representing a timestamp in microseconds since + * epoch (UNIX time). 
This method should only be used if the corresponding field has + * {@link Field.Type#timestamp()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Long} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public long timestampValue() { + // timestamps are encoded in the format 1408452095.22 where the integer part is seconds since + // epoch (e.g. 1408452095.22 == 2014-08-19 07:41:35.220 -05:00) + return new Double(((Double.valueOf(stringValue())) * MICROSECONDS)).longValue(); + } + + /** + * Returns this field's value as a list of {@link FieldValue}. This method should only be used if + * the corresponding field has {@link Field.Mode#REPEATED} mode (i.e. {@link #attribute()} is + * {@link Attribute#REPEATED}). + * + * @throws ClassCastException if the field has not {@link Field.Mode#REPEATED} mode + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public List repeatedValue() { + checkNotNull(value); + return (List) value; + } + + /** + * Returns this field's value as a list of {@link FieldValue}. This method should only be used if + * the corresponding field has {@link Field.Type#record(Field...)} type (i.e. {@link #attribute()} + * is {@link Attribute#RECORD}). + * + * @throws ClassCastException if the field is not a {@link Field.Type#record(Field...)} type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public List recordValue() { + checkNotNull(value); + return (List) value; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("attribute", attribute) + .add("value", value) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(attribute, value); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof FieldValue)) { + return false; + } + FieldValue other = (FieldValue) obj; + return attribute == other.attribute && Objects.equals(value, other.value); + } + + @SuppressWarnings("unchecked") + static FieldValue fromPb(Object cellPb) { + if (Data.isNull(cellPb)) { + return new FieldValue(Attribute.PRIMITIVE, null); + } + if (cellPb instanceof String) { + return new FieldValue(Attribute.PRIMITIVE, cellPb); + } + if (cellPb instanceof List) { + List cellsListPb = (List) cellPb; + List repeatedCells = Lists.newArrayListWithCapacity(cellsListPb.size()); + for (Object repeatedCellPb : cellsListPb) { + repeatedCells.add(FieldValue.fromPb(repeatedCellPb)); + } + return new FieldValue(Attribute.REPEATED, repeatedCells); + } + if (cellPb instanceof Map) { + Map cellMapPb = (Map) cellPb; + if (cellMapPb.containsKey("f")) { + List cellsListPb = (List) cellMapPb.get("f"); + List recordCells = Lists.newArrayListWithCapacity(cellsListPb.size()); + for (Object repeatedCellPb : cellsListPb) { + recordCells.add(FieldValue.fromPb(repeatedCellPb)); + } + return new FieldValue(Attribute.RECORD, recordCells); + } + // This should never be the case when we are processing a first level table field (i.e. 
a + // row's field, not a record sub-field) + if (cellMapPb.containsKey("v")) { + return FieldValue.fromPb(cellMapPb.get("v")); + } + } + throw new AssertionError("Unexpected table cell format"); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java new file mode 100644 index 000000000000..f46e7b40f4c1 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java @@ -0,0 +1,95 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for Google BigQuery format options. These class define the format of external data + * used by BigQuery, for either federated tables or load jobs. + */ +public class FormatOptions implements Serializable { + + static final String CSV = "CSV"; + static final String JSON = "NEWLINE_DELIMITED_JSON"; + static final String DATASTORE_BACKUP = "DATASTORE_BACKUP"; + private static final long serialVersionUID = -443376052020423691L; + + private final String type; + + FormatOptions(String type) { + this.type = type; + } + + /** + * Returns the external data format, as a string. + */ + public String type() { + return type; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("format", type).toString(); + } + + @Override + public int hashCode() { + return Objects.hash(type); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof FormatOptions && Objects.equals(type, ((FormatOptions) obj).type()); + } + + /** + * Default options for CSV format. + */ + public static CsvOptions csv() { + return CsvOptions.builder().build(); + } + + /** + * Default options for NEWLINE_DELIMITED_JSON format. + */ + public static FormatOptions json() { + return new FormatOptions(JSON); + } + + /** + * Default options for DATASTORE_BACKUP format. + */ + public static FormatOptions datastoreBackup() { + return new FormatOptions(DATASTORE_BACKUP); + } + + /** + * Default options for the provided format. + */ + public static FormatOptions of(String format) { + if (checkNotNull(format).equals(CSV)) { + return csv(); + } + return new FormatOptions(format); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java new file mode 100644 index 000000000000..bd86f208480f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java @@ -0,0 +1,456 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google Cloud BigQuery insert all request. This class can be used to stream data into BigQuery one + * record at a time without needing to run a load job. This approach enables querying data without + * the delay of running a load job. There are several important trade-offs to consider before + * choosing an approach. + * + * @see Streaming Data into + * BigQuery + */ +public class InsertAllRequest implements Serializable { + + private static final long serialVersionUID = 211200307773853078L; + + private final TableId table; + private final List rows; + private final Boolean skipInvalidRows; + private final Boolean ignoreUnknownValues; + private final String templateSuffix; + + /** + * A Google Big Query row to be inserted into a table. Each {@code RowToInsert} has an associated + * id used by BigQuery to detect duplicate insertion requests on a best-effort basis. + * + *

+   * <p>Example usage of creating a row to insert:
+   * <pre> {@code
+   *   List<Long> repeatedFieldValue = Arrays.asList(1L, 2L);
+   *   Map<String, Object> recordContent = new HashMap<String, Object>();
+   *   recordContent.put("subfieldName1", "value");
+   *   recordContent.put("subfieldName2", repeatedFieldValue);
+   *   Map<String, Object> rowContent = new HashMap<String, Object>();
+   *   rowContent.put("fieldName1", true);
+   *   rowContent.put("fieldName2", recordContent);
+   *   RowToInsert row = RowToInsert.of("rowId", rowContent);
+   * }</pre>
+ * + * @see + * Data Consistency + */ + public static class RowToInsert implements Serializable { + + private static final long serialVersionUID = 8563060538219179157L; + + private final String id; + private final Map content; + + RowToInsert(String id, Map content) { + this.id = id; + this.content = ImmutableMap.copyOf(content); + } + + /** + * Returns the id associated with the row. Returns {@code null} if not set. + */ + public String id() { + return id; + } + + /** + * Returns the actual content of the row, as a map. + */ + public Map content() { + return content; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("id", id) + .add("content", content) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(id, content); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RowToInsert)) { + return false; + } + RowToInsert other = (RowToInsert) obj; + return Objects.equals(id, other.id) + && Objects.equals(content, other.content); + } + + /** + * Creates a row to be inserted with associated id. + * + * @param id id of the row, used to identify duplicates + * @param content the actual content of the row + */ + public static RowToInsert of(String id, Map content) { + return new RowToInsert(checkNotNull(id), checkNotNull(content)); + } + + /** + * Creates a row to be inserted without associated id. + * + * @param content the actual content of the row + */ + public static RowToInsert of(Map content) { + return new RowToInsert(null, checkNotNull(content)); + } + } + + public static final class Builder { + + private TableId table; + private List rows; + private Boolean skipInvalidRows; + private Boolean ignoreUnknownValues; + private String templateSuffix; + + private Builder() {} + + /** + * Sets the destination table for rows insert request. + */ + public Builder table(TableId table) { + this.table = checkNotNull(table); + return this; + } + + /** + * Sets the rows to insert as a list of {@link RowToInsert} objects. + */ + public Builder rows(Iterable rows) { + this.rows = Lists.newLinkedList(checkNotNull(rows)); + return this; + } + + /** + * Adds a row to be inserted. + */ + public Builder addRow(RowToInsert rowToInsert) { + checkNotNull(rowToInsert); + if (rows == null) { + rows = Lists.newArrayList(); + } + rows.add(rowToInsert); + return this; + } + + /** + * Adds a row to be inserted with associated id. + * + *

+     * <p>Example usage of adding a row with associated id:
+     * <pre> {@code
+     *   InsertAllRequest.Builder builder = InsertAllRequest.builder(tableId);
+     *   List<Long> repeatedFieldValue = Arrays.asList(1L, 2L);
+     *   Map<String, Object> recordContent = new HashMap<String, Object>();
+     *   recordContent.put("subfieldName1", "value");
+     *   recordContent.put("subfieldName2", repeatedFieldValue);
+     *   Map<String, Object> rowContent = new HashMap<String, Object>();
+     *   rowContent.put("fieldName1", true);
+     *   rowContent.put("fieldName2", recordContent);
+     *   builder.addRow("rowId", rowContent);
+     * }</pre>
+ */ + public Builder addRow(String id, Map content) { + addRow(new RowToInsert(id, content)); + return this; + } + + /** + * Adds a row to be inserted without an associated id. + * + *

+     * <p>Example usage of adding a row without an associated id:
+     * <pre> {@code
+     *   InsertAllRequest.Builder builder = InsertAllRequest.builder(tableId);
+     *   List<Long> repeatedFieldValue = Arrays.asList(1L, 2L);
+     *   Map<String, Object> recordContent = new HashMap<String, Object>();
+     *   recordContent.put("subfieldName1", "value");
+     *   recordContent.put("subfieldName2", repeatedFieldValue);
+     *   Map<String, Object> rowContent = new HashMap<String, Object>();
+     *   rowContent.put("fieldName1", true);
+     *   rowContent.put("fieldName2", recordContent);
+     *   builder.addRow(rowContent);
+     * }</pre>
+ */ + public Builder addRow(Map content) { + addRow(new RowToInsert(null, content)); + return this; + } + + /** + * Sets whether to insert all valid rows of a request, even if invalid rows exist. If not set + * the entire insert request will fail if it contains an invalid row. + */ + public Builder skipInvalidRows(boolean skipInvalidRows) { + this.skipInvalidRows = skipInvalidRows; + return this; + } + + /** + * Sets whether to accept rows that contain values that do not match the schema. The unknown + * values are ignored. If not set, rows with unknown values are considered to be invalid. + */ + public Builder ignoreUnknownValues(boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * If specified, the destination table is treated as a base template. Rows are inserted into an + * instance table named "{destination}{templateSuffix}". BigQuery will manage the creation of + * the instance table, using the schema of the base template table. Table creation might take + * some time. To obtain table's information after {@link BigQuery#insertAll(InsertAllRequest)} + * is called use: + *
+     * <pre> {@code
+     * String suffixTableId = ...;
+     * BaseTableInfo suffixTable = bigquery.getTable(DATASET, suffixTableId);
+     * while (suffixTable == null) {
+     *   Thread.sleep(1000L);
+     *   suffixTable = bigquery.getTable(DATASET, suffixTableId);
+     * }}</pre>
+ * + * @see + * Template Tables + */ + public Builder templateSuffix(String templateSuffix) { + this.templateSuffix = templateSuffix; + return this; + } + + public InsertAllRequest build() { + return new InsertAllRequest(this); + } + } + + private InsertAllRequest(Builder builder) { + this.table = checkNotNull(builder.table); + this.rows = ImmutableList.copyOf(checkNotNull(builder.rows)); + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.skipInvalidRows = builder.skipInvalidRows; + this.templateSuffix = builder.templateSuffix; + } + + /** + * Returns the destination table for rows insert request. + */ + public TableId table() { + return table; + } + + /** + * Returns the rows to be inserted. + */ + public List rows() { + return rows; + } + + /** + * Returns whether to accept rows that contain values that do not match the schema. The unknown + * values are ignored. If not set, rows with unknown values are considered to be invalid. + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns whether to insert all valid rows of a request, even if invalid rows exist. If not set + * the entire insert request will fail if it contains an invalid row. + */ + public Boolean skipInvalidRows() { + return skipInvalidRows; + } + + /** + * If specified, the destination table is treated as a base template. Rows are inserted into an + * instance table named "{destination}{templateSuffix}". BigQuery will manage the creation of the + * instance table, using the schema of the base template table. Table creation might take some + * time. To obtain table's information after {@link BigQuery#insertAll(InsertAllRequest)} is + * called use: + *
+   * <pre> {@code
+   * String suffixTableId = ...;
+   * BaseTableInfo suffixTable = bigquery.getTable(DATASET, suffixTableId);
+   * while (suffixTable == null) {
+   *   Thread.sleep(1000L);
+   *   suffixTable = bigquery.getTable(DATASET, suffixTableId);
+   * }}</pre>
+ * + * @see + * Template Tables + */ + public String templateSuffix() { + return templateSuffix; + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table. + */ + public static Builder builder(TableId table) { + return new Builder().table(table); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableId table, Iterable rows) { + return builder(table).rows(rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableId table, RowToInsert... rows) { + return builder(table, ImmutableList.copyOf(rows)); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table. + */ + public static Builder builder(String datasetId, String tableId) { + return new Builder().table(TableId.of(datasetId, tableId)); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(String datasetId, String tableId, Iterable rows) { + return builder(TableId.of(datasetId, tableId), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(String datasetId, String tableId, RowToInsert... rows) { + return builder(TableId.of(datasetId, tableId), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(BaseTableInfo tableInfo, Iterable rows) { + return builder(tableInfo.tableId(), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(BaseTableInfo tableInfo, RowToInsert... rows) { + return builder(tableInfo.tableId(), rows); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableId tableId, Iterable rows) { + return builder(tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableId tableId, RowToInsert... rows) { + return builder(tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(String datasetId, String tableId, Iterable rows) { + return builder(datasetId, tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(String datasetId, String tableId, RowToInsert... rows) { + return builder(datasetId, tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(BaseTableInfo tableInfo, Iterable rows) { + return builder(tableInfo.tableId(), rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(BaseTableInfo tableInfo, RowToInsert... 
rows) { + return builder(tableInfo.tableId(), rows).build(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("table", table) + .add("rows", rows) + .add("ignoreUnknownValues", ignoreUnknownValues) + .add("skipInvalidRows", skipInvalidRows) + .add("templateSuffix", templateSuffix) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(table, rows, ignoreUnknownValues, skipInvalidRows, templateSuffix); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof InsertAllRequest)) { + return false; + } + InsertAllRequest other = (InsertAllRequest) obj; + return Objects.equals(table, other.table) + && Objects.equals(rows, other.rows) + && Objects.equals(ignoreUnknownValues, other.ignoreUnknownValues) + && Objects.equals(skipInvalidRows, other.skipInvalidRows) + && Objects.equals(templateSuffix, other.templateSuffix); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java new file mode 100644 index 000000000000..992c5d851bbc --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java @@ -0,0 +1,121 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google Cloud BigQuery insert all response. Objects of this class possibly contain errors for an + * {@link InsertAllRequest}. If a row failed to be inserted, the non-empty list of errors associated + * to that row's index can be obtained with {@link InsertAllResponse#errorsFor(long)}. + * {@link InsertAllResponse#insertErrors()} can be used to return all errors caused by a + * {@link InsertAllRequest} as a map. + */ +public class InsertAllResponse implements Serializable { + + private static final long serialVersionUID = -6934152676514098452L; + + private final Map> insertErrors; + + InsertAllResponse(Map> insertErrors) { + this.insertErrors = insertErrors != null ? ImmutableMap.copyOf(insertErrors) + : ImmutableMap.>of(); + } + + /** + * Returns all insertion errors as a map whose keys are indexes of rows that failed to insert. + * Each failed row index is associated with a non-empty list of {@link BigQueryError}. 
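+   *
+   * <p>For example, insertion errors might be inspected as follows ({@code bigquery} and
+   * {@code request} are assumed to be an initialized service object and a prepared
+   * {@link InsertAllRequest}):
+   * <pre> {@code
+   *   InsertAllResponse response = bigquery.insertAll(request);
+   *   if (response.hasErrors()) {
+   *     for (Map.Entry<Long, List<BigQueryError>> entry : response.insertErrors().entrySet()) {
+   *       // handle the errors for the row at index entry.getKey()
+   *     }
+   *   }
+   * }</pre>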
+ */ + public Map> insertErrors() { + return insertErrors; + } + + /** + * Returns errors for the provided row index. If no error exists returns {@code null}. + */ + public List errorsFor(long index) { + return insertErrors.get(index); + } + + /** + * Returns {@code true} if no row insertion failed, {@code false} otherwise. If {@code false} + * {@link #insertErrors()} returns an empty map. + */ + public boolean hasErrors() { + return !insertErrors.isEmpty(); + } + + @Override + public int hashCode() { + return Objects.hash(insertErrors); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof InsertAllResponse + && Objects.equals(insertErrors, ((InsertAllResponse) obj).insertErrors); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("insertErrors", insertErrors).toString(); + } + + TableDataInsertAllResponse toPb() { + TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse(); + if (!insertErrors.isEmpty()) { + responsePb.setInsertErrors(ImmutableList.copyOf(Iterables.transform(insertErrors.entrySet(), + new Function>, InsertErrors>() { + @Override + public InsertErrors apply(Map.Entry> entry) { + return new InsertErrors() + .setIndex(entry.getKey()) + .setErrors(Lists.transform(entry.getValue(), BigQueryError.TO_PB_FUNCTION)); + } + }))); + } + return responsePb; + } + + static InsertAllResponse fromPb(TableDataInsertAllResponse responsePb) { + Map> insertErrors = null; + if (responsePb.getInsertErrors() != null) { + List errorsPb = responsePb.getInsertErrors(); + insertErrors = Maps.newHashMapWithExpectedSize(errorsPb.size()); + for (InsertErrors errorPb : errorsPb) { + insertErrors.put(errorPb.getIndex(), Lists.transform( + errorPb.getErrors() != null ? errorPb.getErrors() : ImmutableList.of(), + BigQueryError.FROM_PB_FUNCTION)); + } + } + return new InsertAllResponse(insertErrors); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java new file mode 100644 index 000000000000..c0d7ddc29c37 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Job.java @@ -0,0 +1,126 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +/** + * A Google BigQuery Job. + * + *

+ * <p>Objects of this class are immutable. To get a {@code Job} object with the most recent
+ * information, use {@link #reload}.
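+ *
+ * <p>For example, an existing job might be looked up and checked for completion as follows
+ * ({@code bigquery} is assumed to be an initialized {@link BigQuery} service object, and the
+ * job id is illustrative):
+ * <pre> {@code
+ *   Job job = Job.get(bigquery, "my-job-id");
+ *   if (job != null && job.isDone()) {
+ *     // the job finished, either failing or succeeding
+ *   }
+ * }</pre>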

+ */ +public final class Job { + + private final BigQuery bigquery; + private final JobInfo info; + + /** + * Constructs a {@code Job} object for the provided {@code JobInfo}. The BigQuery service + * is used to issue requests. + * + * @param bigquery the BigQuery service used for issuing requests + * @param info jobs's info + */ + public Job(BigQuery bigquery, JobInfo info) { + this.bigquery = checkNotNull(bigquery); + this.info = checkNotNull(info); + } + + /** + * Creates a {@code Job} object for the provided job's user-defined id. Performs an RPC call to + * get the latest job information. + * + * @param bigquery the BigQuery service used for issuing requests + * @param job job's id, either user-defined or picked by the BigQuery service + * @param options job options + * @return the {@code Job} object or {@code null} if not found + * @throws BigQueryException upon failure + */ + public static Job get(BigQuery bigquery, String job, BigQuery.JobOption... options) { + JobInfo info = bigquery.getJob(job, options); + return info != null ? new Job(bigquery, info) : null; + } + + /** + * Returns the job's information. + */ + public JobInfo info() { + return info; + } + + /** + * Checks if this job exists. + * + * @return {@code true} if this job exists, {@code false} otherwise + * @throws BigQueryException upon failure + */ + public boolean exists() { + return bigquery.getJob(info.jobId(), BigQuery.JobOption.fields()) != null; + } + + /** + * Checks if this job has completed its execution, either failing or succeeding. If the job does + * not exist this method returns {@code false}. To correctly wait for job's completion check that + * the job exists first, using {@link #exists()}: + *
+   * <pre> {@code
+   * if (job.exists()) {
+   *   while (!job.isDone()) {
+   *     Thread.sleep(1000L);
+   *   }
+   * }}</pre>
+ * + * @return {@code true} if this job is in {@link JobStatus.State#DONE} state, {@code false} if the + * state is not {@link JobStatus.State#DONE} or the job does not exist + * @throws BigQueryException upon failure + */ + public boolean isDone() { + JobInfo job = bigquery.getJob(info.jobId(), + BigQuery.JobOption.fields(BigQuery.JobField.STATUS)); + return job != null && job.status().state() == JobStatus.State.DONE; + } + + /** + * Fetches current job's latest information. Returns {@code null} if the job does not exist. + * + * @param options job options + * @return a {@code Job} object with latest information or {@code null} if not found + * @throws BigQueryException upon failure + */ + public Job reload(BigQuery.JobOption... options) { + return Job.get(bigquery, info.jobId().job(), options); + } + + /** + * Sends a job cancel request. + * + * @return {@code true} if cancel request was sent successfully, {@code false} if job was not + * found + * @throws BigQueryException upon failure + */ + public boolean cancel() { + return bigquery.cancel(info.jobId()); + } + + /** + * Returns the job's {@code BigQuery} object used to issue requests. + */ + public BigQuery bigquery() { + return bigquery; + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java new file mode 100644 index 000000000000..2244969567ef --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobConfiguration.java @@ -0,0 +1,145 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.common.base.MoreObjects.ToStringHelper; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for a BigQuery job configuration. + */ +public abstract class JobConfiguration implements Serializable { + + private static final long serialVersionUID = -548132177415406526L; + + private final Type type; + + /** + * Type of a BigQuery Job. + */ + enum Type { + /** + * A Copy Job copies an existing table to another new or existing table. Instances of + * {@code JobConfiguration} for this type are implemented by {@link CopyJobConfiguration}. + */ + COPY, + /** + * An Extract Job exports a BigQuery table to Google Cloud Storage. Instances of + * {@code JobConfiguration} for this type are implemented by {@link ExtractJobConfiguration}. + */ + EXTRACT, + /** + * A Load Job loads data from one of several formats into a table. Instances of + * {@code JobConfiguration} for this type are implemented by {@link LoadJobConfiguration}. + */ + LOAD, + /** + * A Query Job runs a query against BigQuery data. Instances of + * {@code JobConfiguration} for this type are implemented by {@link QueryJobConfiguration}. 
+ */ + QUERY + } + + /** + * Base builder for job configurations. + * + * @param the job configuration type + * @param the job configuration builder + */ + public abstract static class Builder> { + + private Type type; + + Builder(Type type) { + this.type = checkNotNull(type); + } + + @SuppressWarnings("unchecked") + B self() { + return (B) this; + } + + B type(Type type) { + this.type = checkNotNull(type); + return self(); + } + + /** + * Creates an object. + */ + public abstract T build(); + } + + JobConfiguration(Builder builder) { + this.type = builder.type; + } + + /** + * Returns the type of the job configuration. + */ + public Type type() { + return type; + } + + /** + * Returns a builder for the object. + */ + public abstract Builder toBuilder(); + + ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this).add("type", type); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + final int baseHashCode() { + return Objects.hash(type); + } + + final boolean baseEquals(JobConfiguration jobConfiguration) { + return Objects.equals(toPb(), jobConfiguration.toPb()); + } + + abstract JobConfiguration setProjectId(String projectId); + + abstract com.google.api.services.bigquery.model.JobConfiguration toPb(); + + @SuppressWarnings("unchecked") + static T fromPb( + com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + if (configurationPb.getCopy() != null) { + return (T) CopyJobConfiguration.fromPb(configurationPb); + } else if (configurationPb.getExtract() != null) { + return (T) ExtractJobConfiguration.fromPb(configurationPb); + } else if (configurationPb.getLoad() != null) { + return (T) LoadJobConfiguration.fromPb(configurationPb); + } else if (configurationPb.getQuery() != null) { + return (T) QueryJobConfiguration.fromPb(configurationPb); + } else { + // never reached + throw new IllegalArgumentException("Job configuration is not supported"); + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java new file mode 100644 index 000000000000..898c894f9a21 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobReference; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Job identity. + */ +public class JobId implements Serializable { + + private static final long serialVersionUID = 1225914835379688976L; + + private final String project; + private final String job; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns the job's user-defined id. 
+   */
+  public String job() {
+    return job;
+  }
+
+  private JobId(String project, String job) {
+    this.project = project;
+    this.job = job;
+  }
+
+  /**
+   * Creates a job identity given the project's and the job's user-defined ids.
+   */
+  public static JobId of(String project, String job) {
+    return new JobId(checkNotNull(project), checkNotNull(job));
+  }
+
+  /**
+   * Creates a job identity given only its user-defined id.
+   */
+  public static JobId of(String job) {
+    return new JobId(null, checkNotNull(job));
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof JobId && Objects.equals(toPb(), ((JobId) obj).toPb());
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(project, job);
+  }
+
+  @Override
+  public String toString() {
+    return toPb().toString();
+  }
+
+  JobReference toPb() {
+    return new JobReference().setProjectId(project).setJobId(job);
+  }
+
+  static JobId fromPb(JobReference jobRef) {
+    return new JobId(jobRef.getProjectId(), jobRef.getJobId());
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java
new file mode 100644
index 000000000000..47135b6d97d0
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobInfo.java
@@ -0,0 +1,319 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.api.services.bigquery.model.Job;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Google BigQuery Job information. Jobs are objects that manage asynchronous tasks such as running
+ * queries, loading data, and exporting data. Use {@link CopyJobConfiguration} for a job that
+ * copies an existing table. Use {@link ExtractJobConfiguration} for a job that exports a table to
+ * Google Cloud Storage. Use {@link LoadJobConfiguration} for a job that loads data from Google
+ * Cloud Storage into a table. Use {@link QueryJobConfiguration} for a job that runs a query.
+ *
+ * @see Jobs
+ */
+public final class JobInfo implements Serializable {
+
+  static final Function<Job, JobInfo> FROM_PB_FUNCTION =
+      new Function<Job, JobInfo>() {
+        @Override
+        public JobInfo apply(Job pb) {
+          return JobInfo.fromPb(pb);
+        }
+      };
+  private static final long serialVersionUID = -3272941007234620265L;
+
+  /**
+   * Specifies whether the job is allowed to create new tables.
+   */
+  public enum CreateDisposition {
+    /**
+     * Configures the job to create the table if it does not exist.
+     */
+    CREATE_IF_NEEDED,
+
+    /**
+     * Configures the job to fail with a not-found error if the table does not exist.
+     */
+    CREATE_NEVER
+  }
+
+  /**
+   * Specifies the action that occurs if the destination table already exists.
+   */
+  public enum WriteDisposition {
+    /**
+     * Configures the job to overwrite the table data if the table already exists.
+     */
+    WRITE_TRUNCATE,
+
+    /**
+     * Configures the job to append data to the table if it already exists.
+     */
+    WRITE_APPEND,
+
+    /**
+     * Configures the job to fail with a duplicate error if the table already exists.
+     */
+    WRITE_EMPTY
+  }
+
+  private final String etag;
+  private final String id;
+  private final JobId jobId;
+  private final String selfLink;
+  private final JobStatus status;
+  private final JobStatistics statistics;
+  private final String userEmail;
+  private final JobConfiguration configuration;
+
+  public static final class Builder {
+
+    private String etag;
+    private String id;
+    private JobId jobId;
+    private String selfLink;
+    private JobStatus status;
+    private JobStatistics statistics;
+    private String userEmail;
+    private JobConfiguration configuration;
+
+    private Builder() {}
+
+    private Builder(JobInfo jobInfo) {
+      this.etag = jobInfo.etag;
+      this.id = jobInfo.id;
+      this.jobId = jobInfo.jobId;
+      this.selfLink = jobInfo.selfLink;
+      this.status = jobInfo.status;
+      this.statistics = jobInfo.statistics;
+      this.userEmail = jobInfo.userEmail;
+      this.configuration = jobInfo.configuration;
+    }
+
+    protected Builder(Job jobPb) {
+      this.etag = jobPb.getEtag();
+      this.id = jobPb.getId();
+      if (jobPb.getJobReference() != null) {
+        this.jobId = JobId.fromPb(jobPb.getJobReference());
+      }
+      this.selfLink = jobPb.getSelfLink();
+      if (jobPb.getStatus() != null) {
+        this.status = JobStatus.fromPb(jobPb.getStatus());
+      }
+      if (jobPb.getStatistics() != null) {
+        this.statistics = JobStatistics.fromPb(jobPb.getStatistics());
+      }
+      this.userEmail = jobPb.getUserEmail();
+      this.configuration = JobConfiguration.fromPb(jobPb.getConfiguration());
+    }
+
+    Builder etag(String etag) {
+      this.etag = etag;
+      return this;
+    }
+
+    Builder id(String id) {
+      this.id = id;
+      return this;
+    }
+
+    /**
+     * Sets the job identity.
+     */
+    public Builder jobId(JobId jobId) {
+      this.jobId = jobId;
+      return this;
+    }
+
+    Builder selfLink(String selfLink) {
+      this.selfLink = selfLink;
+      return this;
+    }
+
+    Builder status(JobStatus status) {
+      this.status = status;
+      return this;
+    }
+
+    Builder statistics(JobStatistics statistics) {
+      this.statistics = statistics;
+      return this;
+    }
+
+    Builder userEmail(String userEmail) {
+      this.userEmail = userEmail;
+      return this;
+    }
+
+    public Builder configuration(JobConfiguration configuration) {
+      this.configuration = configuration;
+      return this;
+    }
+
+    public JobInfo build() {
+      return new JobInfo(this);
+    }
+  }
+
+  private JobInfo(Builder builder) {
+    this.jobId = builder.jobId;
+    this.etag = builder.etag;
+    this.id = builder.id;
+    this.selfLink = builder.selfLink;
+    this.status = builder.status;
+    this.statistics = builder.statistics;
+    this.userEmail = builder.userEmail;
+    this.configuration = builder.configuration;
+  }
+
+  /**
+   * Returns the hash of the job resource.
+   */
+  public String etag() {
+    return etag;
+  }
+
+  /**
+   * Returns an opaque id for the job.
+   */
+  public String id() {
+    return id;
+  }
+
+  /**
+   * Returns the job identity.
+   */
+  public JobId jobId() {
+    return jobId;
+  }
+
+  /**
+   * Returns a URL that can be used to access the resource again. The returned URL can be used for
+   * GET requests.
+   */
+  public String selfLink() {
+    return selfLink;
+  }
+
+  /**
+   * Returns the status of this job. Examine this value when polling an asynchronous job to see if
+   * the job is complete.
+   */
+  public JobStatus status() {
+    return status;
+  }
+
+  /**
+   * Returns information about the job, including the job's starting and ending times.
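+   *
+   * <p>The returned value can be assigned directly to the statistics subclass that matches the
+   * job's configuration type. A minimal sketch, assuming {@code jobInfo} describes a query job:
+   * <pre> {@code
+   * JobStatistics.QueryStatistics statistics = jobInfo.statistics();
+   * } </pre>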
+   */
+  @SuppressWarnings("unchecked")
+  public <S extends JobStatistics> S statistics() {
+    return (S) statistics;
+  }
+
+  /**
+   * Returns the email address of the user who ran the job.
+   */
+  public String userEmail() {
+    return userEmail;
+  }
+
+  /**
+   * Returns the job's configuration.
+   */
+  @SuppressWarnings("unchecked")
+  public <C extends JobConfiguration> C configuration() {
+    return (C) configuration;
+  }
+
+  /**
+   * Returns a builder for the job.
+   */
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("job", jobId)
+        .add("status", status)
+        .add("statistics", statistics)
+        .add("userEmail", userEmail)
+        .add("etag", etag)
+        .add("id", id)
+        .add("selfLink", selfLink)
+        .add("configuration", configuration)
+        .toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(jobId);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof JobInfo && Objects.equals(toPb(), ((JobInfo) obj).toPb());
+  }
+
+  JobInfo setProjectId(String projectId) {
+    return toBuilder().configuration(configuration.setProjectId(projectId)).build();
+  }
+
+  Job toPb() {
+    Job jobPb = new Job();
+    jobPb.setEtag(etag);
+    jobPb.setId(id);
+    jobPb.setSelfLink(selfLink);
+    jobPb.setUserEmail(userEmail);
+    if (jobId != null) {
+      jobPb.setJobReference(jobId.toPb());
+    }
+    if (status != null) {
+      jobPb.setStatus(status.toPb());
+    }
+    if (statistics != null) {
+      jobPb.setStatistics(statistics.toPb());
+    }
+    jobPb.setConfiguration(configuration.toPb());
+    return jobPb;
+  }
+
+  public static Builder builder(JobConfiguration configuration) {
+    return new Builder().configuration(configuration);
+  }
+
+  public static JobInfo of(JobConfiguration configuration) {
+    return builder(configuration).build();
+  }
+
+  public static JobInfo of(JobId jobId, JobConfiguration configuration) {
+    return builder(configuration).jobId(jobId).build();
+  }
+
+  static JobInfo fromPb(Job jobPb) {
+    return new Builder(jobPb).build();
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java
new file mode 100644
index 000000000000..34e4917921ba
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatistics.java
@@ -0,0 +1,516 @@
+package com.google.gcloud.bigquery;
+
+import com.google.api.services.bigquery.model.JobStatistics2;
+import com.google.api.services.bigquery.model.JobStatistics3;
+import com.google.api.services.bigquery.model.JobStatistics4;
+import com.google.common.base.MoreObjects;
+import com.google.common.base.MoreObjects.ToStringHelper;
+import com.google.common.collect.Lists;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A Google BigQuery Job statistics.
+ */
+public class JobStatistics implements Serializable {
+
+  private static final long serialVersionUID = 1433024714741660399L;
+
+  private final Long creationTime;
+  private final Long endTime;
+  private final Long startTime;
+
+  /**
+   * A Google BigQuery Extract Job statistics.
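+   *
+   * <p>A usage sketch, assuming {@code jobInfo} describes a completed extract job:
+   * <pre> {@code
+   * JobStatistics.ExtractStatistics statistics = jobInfo.statistics();
+   * List<Long> fileCounts = statistics.destinationUriFileCounts();
+   * } </pre>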
+   */
+  public static class ExtractStatistics extends JobStatistics {
+
+    private static final long serialVersionUID = -1566598819212767373L;
+
+    private final List<Long> destinationUriFileCounts;
+
+    static final class Builder extends JobStatistics.Builder<ExtractStatistics, Builder> {
+
+      private List<Long> destinationUriFileCounts;
+
+      private Builder() {}
+
+      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
+        super(statisticsPb);
+        this.destinationUriFileCounts = statisticsPb.getExtract().getDestinationUriFileCounts();
+      }
+
+      Builder destinationUriFileCounts(List<Long> destinationUriFileCounts) {
+        this.destinationUriFileCounts = destinationUriFileCounts;
+        return self();
+      }
+
+      @Override
+      ExtractStatistics build() {
+        return new ExtractStatistics(this);
+      }
+    }
+
+    private ExtractStatistics(Builder builder) {
+      super(builder);
+      this.destinationUriFileCounts = builder.destinationUriFileCounts;
+    }
+
+    /**
+     * Returns the number of files per destination URI or URI pattern specified in the extract job.
+     * These values will be in the same order as the URIs specified by
+     * {@link ExtractJobConfiguration#destinationUris()}.
+     */
+    public List<Long> destinationUriFileCounts() {
+      return destinationUriFileCounts;
+    }
+
+    @Override
+    ToStringHelper toStringHelper() {
+      return super.toStringHelper().add("destinationUriFileCounts", destinationUriFileCounts);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return obj instanceof ExtractStatistics
+          && Objects.equals(toPb(), ((ExtractStatistics) obj).toPb());
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(super.hashCode(), destinationUriFileCounts);
+    }
+
+    @Override
+    com.google.api.services.bigquery.model.JobStatistics toPb() {
+      com.google.api.services.bigquery.model.JobStatistics statisticsPb = super.toPb();
+      return statisticsPb.setExtract(
+          new JobStatistics4().setDestinationUriFileCounts(destinationUriFileCounts));
+    }
+
+    static Builder builder() {
+      return new Builder();
+    }
+
+    @SuppressWarnings("unchecked")
+    static ExtractStatistics fromPb(
+        com.google.api.services.bigquery.model.JobStatistics statisticPb) {
+      return new Builder(statisticPb).build();
+    }
+  }
+
+  /**
+   * A Google BigQuery Load Job statistics.
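+   *
+   * <p>A usage sketch, assuming {@code jobInfo} describes a completed load job:
+   * <pre> {@code
+   * JobStatistics.LoadStatistics statistics = jobInfo.statistics();
+   * Long rowsLoaded = statistics.outputRows();
+   * } </pre>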
+   */
+  public static class LoadStatistics extends JobStatistics {
+
+    private static final long serialVersionUID = -707369246536309215L;
+
+    private final Long inputBytes;
+    private final Long inputFiles;
+    private final Long outputBytes;
+    private final Long outputRows;
+
+    static final class Builder extends JobStatistics.Builder<LoadStatistics, Builder> {
+
+      private Long inputBytes;
+      private Long inputFiles;
+      private Long outputBytes;
+      private Long outputRows;
+
+      private Builder() {}
+
+      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
+        super(statisticsPb);
+        this.inputBytes = statisticsPb.getLoad().getInputFileBytes();
+        this.inputFiles = statisticsPb.getLoad().getInputFiles();
+        this.outputBytes = statisticsPb.getLoad().getOutputBytes();
+        this.outputRows = statisticsPb.getLoad().getOutputRows();
+      }
+
+      Builder inputBytes(Long inputBytes) {
+        this.inputBytes = inputBytes;
+        return self();
+      }
+
+      Builder inputFiles(Long inputFiles) {
+        this.inputFiles = inputFiles;
+        return self();
+      }
+
+      Builder outputBytes(Long outputBytes) {
+        this.outputBytes = outputBytes;
+        return self();
+      }
+
+      Builder outputRows(Long outputRows) {
+        this.outputRows = outputRows;
+        return self();
+      }
+
+      @Override
+      LoadStatistics build() {
+        return new LoadStatistics(this);
+      }
+    }
+
+    private LoadStatistics(Builder builder) {
+      super(builder);
+      this.inputBytes = builder.inputBytes;
+      this.inputFiles = builder.inputFiles;
+      this.outputBytes = builder.outputBytes;
+      this.outputRows = builder.outputRows;
+    }
+
+    /**
+     * Returns the number of bytes of source data in a load job.
+     */
+    public Long inputBytes() {
+      return inputBytes;
+    }
+
+    /**
+     * Returns the number of source files in a load job.
+     */
+    public Long inputFiles() {
+      return inputFiles;
+    }
+
+    /**
+     * Returns the size of the data loaded by a load job so far, in bytes.
+     */
+    public Long outputBytes() {
+      return outputBytes;
+    }
+
+    /**
+     * Returns the number of rows loaded by a load job so far.
+     */
+    public Long outputRows() {
+      return outputRows;
+    }
+
+    @Override
+    ToStringHelper toStringHelper() {
+      return super.toStringHelper()
+          .add("inputBytes", inputBytes)
+          .add("inputFiles", inputFiles)
+          .add("outputBytes", outputBytes)
+          .add("outputRows", outputRows);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return obj instanceof LoadStatistics && Objects.equals(toPb(), ((LoadStatistics) obj).toPb());
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(super.hashCode(), inputBytes, inputFiles, outputBytes, outputRows);
+    }
+
+    @Override
+    com.google.api.services.bigquery.model.JobStatistics toPb() {
+      JobStatistics3 loadStatisticsPb = new JobStatistics3();
+      loadStatisticsPb.setInputFileBytes(inputBytes);
+      loadStatisticsPb.setInputFiles(inputFiles);
+      loadStatisticsPb.setOutputBytes(outputBytes);
+      loadStatisticsPb.setOutputRows(outputRows);
+      return super.toPb().setLoad(loadStatisticsPb);
+    }
+
+    static Builder builder() {
+      return new Builder();
+    }
+
+    @SuppressWarnings("unchecked")
+    static LoadStatistics fromPb(com.google.api.services.bigquery.model.JobStatistics statisticPb) {
+      return new Builder(statisticPb).build();
+    }
+  }
+
+  /**
+   * A Google BigQuery Query Job statistics.
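+   *
+   * <p>A usage sketch, assuming {@code jobInfo} describes a completed query job:
+   * <pre> {@code
+   * JobStatistics.QueryStatistics statistics = jobInfo.statistics();
+   * Boolean cacheHit = statistics.cacheHit();
+   * Long bytesBilled = statistics.totalBytesBilled();
+   * } </pre>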
+   */
+  public static class QueryStatistics extends JobStatistics {
+
+    private static final long serialVersionUID = 7539354109226732353L;
+
+    private final Integer billingTier;
+    private final Boolean cacheHit;
+    private final Long totalBytesBilled;
+    private final Long totalBytesProcessed;
+    private final List<QueryStage> queryPlan;
+
+    static final class Builder extends JobStatistics.Builder<QueryStatistics, Builder> {
+
+      private Integer billingTier;
+      private Boolean cacheHit;
+      private Long totalBytesBilled;
+      private Long totalBytesProcessed;
+      private List<QueryStage> queryPlan;
+
+      private Builder() {}
+
+      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
+        super(statisticsPb);
+        this.billingTier = statisticsPb.getQuery().getBillingTier();
+        this.cacheHit = statisticsPb.getQuery().getCacheHit();
+        this.totalBytesBilled = statisticsPb.getQuery().getTotalBytesBilled();
+        this.totalBytesProcessed = statisticsPb.getQuery().getTotalBytesProcessed();
+        if (statisticsPb.getQuery().getQueryPlan() != null) {
+          this.queryPlan =
+              Lists.transform(statisticsPb.getQuery().getQueryPlan(), QueryStage.FROM_PB_FUNCTION);
+        }
+      }
+
+      Builder billingTier(Integer billingTier) {
+        this.billingTier = billingTier;
+        return self();
+      }
+
+      Builder cacheHit(Boolean cacheHit) {
+        this.cacheHit = cacheHit;
+        return self();
+      }
+
+      Builder totalBytesBilled(Long totalBytesBilled) {
+        this.totalBytesBilled = totalBytesBilled;
+        return self();
+      }
+
+      Builder totalBytesProcessed(Long totalBytesProcessed) {
+        this.totalBytesProcessed = totalBytesProcessed;
+        return self();
+      }
+
+      Builder queryPlan(List<QueryStage> queryPlan) {
+        this.queryPlan = queryPlan;
+        return self();
+      }
+
+      @Override
+      QueryStatistics build() {
+        return new QueryStatistics(this);
+      }
+    }
+
+    private QueryStatistics(Builder builder) {
+      super(builder);
+      this.billingTier = builder.billingTier;
+      this.cacheHit = builder.cacheHit;
+      this.totalBytesBilled = builder.totalBytesBilled;
+      this.totalBytesProcessed = builder.totalBytesProcessed;
+      this.queryPlan = builder.queryPlan;
+    }
+
+    /**
+     * Returns the billing tier for the job.
+     */
+    public Integer billingTier() {
+      return billingTier;
+    }
+
+    /**
+     * Returns whether the query result was fetched from the query cache.
+     *
+     * @see Query Caching
+     */
+    public Boolean cacheHit() {
+      return cacheHit;
+    }
+
+    /**
+     * Returns the total number of bytes billed for the job.
+     */
+    public Long totalBytesBilled() {
+      return totalBytesBilled;
+    }
+
+    /**
+     * Returns the total number of bytes processed by the job.
+     */
+    public Long totalBytesProcessed() {
+      return totalBytesProcessed;
+    }
+
+    /**
+     * Returns the query plan as a list of stages or {@code null} if a query plan is not available.
+     * Each stage involves a number of steps that read from data sources, perform a series of
+     * transformations on the input, and emit an output to a future stage (or the final result). The
+     * query plan is available for a completed query job and is retained for 7 days.
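+     *
+     * <p>A null-safe traversal sketch, using only the accessors above:
+     * <pre> {@code
+     * if (statistics.queryPlan() != null) {
+     *   for (QueryStage stage : statistics.queryPlan()) {
+     *     // inspect each stage of the executed query
+     *   }
+     * }
+     * } </pre>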
+     *
+     * @see Query Plan
+     */
+    public List<QueryStage> queryPlan() {
+      return queryPlan;
+    }
+
+    @Override
+    ToStringHelper toStringHelper() {
+      return super.toStringHelper()
+          .add("billingTier", billingTier)
+          .add("cacheHit", cacheHit)
+          .add("totalBytesBilled", totalBytesBilled)
+          .add("totalBytesProcessed", totalBytesProcessed)
+          .add("queryPlan", queryPlan);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return obj instanceof QueryStatistics
+          && Objects.equals(toPb(), ((QueryStatistics) obj).toPb());
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(super.hashCode(), billingTier, cacheHit, totalBytesBilled,
+          totalBytesProcessed, queryPlan);
+    }
+
+    @Override
+    com.google.api.services.bigquery.model.JobStatistics toPb() {
+      JobStatistics2 queryStatisticsPb = new JobStatistics2();
+      queryStatisticsPb.setBillingTier(billingTier);
+      queryStatisticsPb.setCacheHit(cacheHit);
+      queryStatisticsPb.setTotalBytesBilled(totalBytesBilled);
+      queryStatisticsPb.setTotalBytesProcessed(totalBytesProcessed);
+      if (queryPlan != null) {
+        queryStatisticsPb.setQueryPlan(Lists.transform(queryPlan, QueryStage.TO_PB_FUNCTION));
+      }
+      return super.toPb().setQuery(queryStatisticsPb);
+    }
+
+    static Builder builder() {
+      return new Builder();
+    }
+
+    @SuppressWarnings("unchecked")
+    static QueryStatistics fromPb(
+        com.google.api.services.bigquery.model.JobStatistics statisticPb) {
+      return new Builder(statisticPb).build();
+    }
+  }
+
+  static class Builder<T extends JobStatistics, B extends Builder<T, B>> {
+
+    private Long creationTime;
+    private Long endTime;
+    private Long startTime;
+
+    protected Builder() {}
+
+    protected Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
+      this.creationTime = statisticsPb.getCreationTime();
+      this.endTime = statisticsPb.getEndTime();
+      this.startTime = statisticsPb.getStartTime();
+    }
+
+    @SuppressWarnings("unchecked")
+    protected B self() {
+      return (B) this;
+    }
+
+    B creationTime(Long creationTime) {
+      this.creationTime = creationTime;
+      return self();
+    }
+
+    B endTime(Long endTime) {
+      this.endTime = endTime;
+      return self();
+    }
+
+    B startTime(Long startTime) {
+      this.startTime = startTime;
+      return self();
+    }
+
+    @SuppressWarnings("unchecked")
+    T build() {
+      return (T) new JobStatistics(this);
+    }
+  }
+
+  protected JobStatistics(Builder builder) {
+    this.creationTime = builder.creationTime;
+    this.endTime = builder.endTime;
+    this.startTime = builder.startTime;
+  }
+
+  /**
+   * Returns the creation time of the job in milliseconds since epoch.
+   */
+  public Long creationTime() {
+    return creationTime;
+  }
+
+  /**
+   * Returns the end time of the job in milliseconds since epoch. Returns {@code null} if the
+   * job has not finished yet.
+   */
+  public Long endTime() {
+    return endTime;
+  }
+
+  /**
+   * Returns the start time of the job in milliseconds since epoch. Returns {@code null} if the
+   * job has not started yet.
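+   *
+   * <p>Together with {@link #endTime()} this permits a null-safe runtime computation, e.g.:
+   * <pre> {@code
+   * if (statistics.startTime() != null && statistics.endTime() != null) {
+   *   long runtimeMillis = statistics.endTime() - statistics.startTime();
+   * }
+   * } </pre>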
+   */
+  public Long startTime() {
+    return startTime;
+  }
+
+  ToStringHelper toStringHelper() {
+    return MoreObjects.toStringHelper(this)
+        .add("creationTime", creationTime)
+        .add("endTime", endTime)
+        .add("startTime", startTime);
+  }
+
+  @Override
+  public String toString() {
+    return toStringHelper().toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(creationTime, endTime, startTime);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof JobStatistics && Objects.equals(toPb(), ((JobStatistics) obj).toPb());
+  }
+
+  com.google.api.services.bigquery.model.JobStatistics toPb() {
+    com.google.api.services.bigquery.model.JobStatistics statistics =
+        new com.google.api.services.bigquery.model.JobStatistics();
+    statistics.setCreationTime(creationTime);
+    statistics.setEndTime(endTime);
+    statistics.setStartTime(startTime);
+    return statistics;
+  }
+
+  static Builder builder() {
+    return new Builder();
+  }
+
+  @SuppressWarnings("unchecked")
+  static <T extends JobStatistics> T fromPb(
+      com.google.api.services.bigquery.model.JobStatistics statisticPb) {
+    if (statisticPb.getLoad() != null) {
+      return (T) LoadStatistics.fromPb(statisticPb);
+    } else if (statisticPb.getExtract() != null) {
+      return (T) ExtractStatistics.fromPb(statisticPb);
+    } else if (statisticPb.getQuery() != null) {
+      return (T) QueryStatistics.fromPb(statisticPb);
+    } else {
+      return (T) new Builder(statisticPb).build();
+    }
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java
new file mode 100644
index 000000000000..738a644a5dde
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java
@@ -0,0 +1,130 @@
+package com.google.gcloud.bigquery;
+
+import com.google.common.base.MoreObjects;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A Google BigQuery Job status. Objects of this class can be examined when polling an asynchronous
+ * job to see if the job completed.
+ */
+public class JobStatus implements Serializable {
+
+  private static final long serialVersionUID = -714976456815445365L;
+
+  /**
+   * Possible states that a BigQuery Job can assume.
+   */
+  public enum State {
+    /**
+     * The BigQuery Job is waiting to be executed.
+     */
+    PENDING,
+
+    /**
+     * The BigQuery Job is being executed.
+     */
+    RUNNING,
+
+    /**
+     * The BigQuery Job has completed, either succeeding or failing. If it failed,
+     * {@link #error()} will be non-null.
+     */
+    DONE
+  }
+
+  private final State state;
+  private final BigQueryError error;
+  private final List<BigQueryError> executionErrors;
+
+  JobStatus(State state) {
+    this.state = state;
+    this.error = null;
+    this.executionErrors = null;
+  }
+
+  JobStatus(State state, BigQueryError error, List<BigQueryError> executionErrors) {
+    this.state = state;
+    this.error = error;
+    this.executionErrors = executionErrors != null ? ImmutableList.copyOf(executionErrors) : null;
+  }
+
+  /**
+   * Returns the state of the job. A {@link State#PENDING} job is waiting to be executed. A
+   * {@link State#RUNNING} job is being executed. A {@link State#DONE} job has completed, either
+   * succeeding or failing. If it failed, {@link #error()} will be non-null.
+   */
+  public State state() {
+    return state;
+  }
+
+  /**
+   * Returns the final error result of the job. If present, indicates that the job has completed
+   * and was unsuccessful.
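+   *
+   * <p>A typical check after polling, using only the accessors of this class:
+   * <pre> {@code
+   * if (status.state() == JobStatus.State.DONE && status.error() != null) {
+   *   // the job finished, but failed
+   * }
+   * } </pre>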
+   *
+   * @see Troubleshooting Errors
+   */
+  public BigQueryError error() {
+    return error;
+  }
+
+  /**
+   * Returns all errors encountered during the running of the job. Errors here do not necessarily
+   * mean that the job has completed or was unsuccessful.
+   *
+   * @see Troubleshooting Errors
+   */
+  public List<BigQueryError> executionErrors() {
+    return executionErrors;
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("state", state)
+        .add("error", error)
+        .add("executionErrors", executionErrors)
+        .toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(state, error, executionErrors);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof JobStatus && Objects.equals(toPb(), ((JobStatus) obj).toPb());
+  }
+
+  com.google.api.services.bigquery.model.JobStatus toPb() {
+    com.google.api.services.bigquery.model.JobStatus statusPb =
+        new com.google.api.services.bigquery.model.JobStatus();
+    if (state != null) {
+      statusPb.setState(state.toString());
+    }
+    if (error != null) {
+      statusPb.setErrorResult(error.toPb());
+    }
+    if (executionErrors != null) {
+      statusPb.setErrors(Lists.transform(executionErrors, BigQueryError.TO_PB_FUNCTION));
+    }
+    return statusPb;
+  }
+
+  static JobStatus fromPb(com.google.api.services.bigquery.model.JobStatus statusPb) {
+    List<BigQueryError> allErrors = null;
+    if (statusPb.getErrors() != null) {
+      allErrors = Lists.transform(statusPb.getErrors(), BigQueryError.FROM_PB_FUNCTION);
+    }
+    BigQueryError error =
+        statusPb.getErrorResult() != null ? BigQueryError.fromPb(statusPb.getErrorResult()) : null;
+    return new JobStatus(State.valueOf(statusPb.getState()), error, allErrors);
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java
new file mode 100644
index 000000000000..223a25a478e0
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadConfiguration.java
@@ -0,0 +1,165 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.gcloud.bigquery.JobInfo.CreateDisposition;
+import com.google.gcloud.bigquery.JobInfo.WriteDisposition;
+
+import java.util.List;
+
+/**
+ * Common interface for a load configuration. A load configuration
+ * ({@link WriteChannelConfiguration}) can be used to load data into a table with a
+ * {@link com.google.gcloud.WriteChannel} ({@link BigQuery#writer(WriteChannelConfiguration)}).
+ * A load configuration ({@link LoadJobConfiguration}) can also be used to create a load job
+ * ({@link JobInfo#of(JobConfiguration)}).
+ */
+public interface LoadConfiguration {
+
+  interface Builder {
+
+    /**
+     * Sets the destination table to load the data into.
+     */
+    Builder destinationTable(TableId destinationTable);
+
+    /**
+     * Sets whether the job is allowed to create new tables.
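+     *
+     * <p>For example, to have the destination table created when it is missing (a sketch,
+     * assuming a {@code LoadConfiguration.Builder} instance named {@code builder}):
+     * <pre> {@code
+     * builder.createDisposition(CreateDisposition.CREATE_IF_NEEDED);
+     * } </pre>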
+     *
+     * @see Create Disposition
+     */
+    Builder createDisposition(CreateDisposition createDisposition);
+
+    /**
+     * Sets the action that should occur if the destination table already exists.
+     *
+     * @see Write Disposition
+     */
+    Builder writeDisposition(WriteDisposition writeDisposition);
+
+    /**
+     * Sets the source format, and possibly some parsing options, of the external data. Supported
+     * formats are {@code CSV}, {@code NEWLINE_DELIMITED_JSON} and {@code DATASTORE_BACKUP}. If not
+     * specified, {@code CSV} format is assumed.
+     *
+     * @see Source Format
+     */
+    Builder formatOptions(FormatOptions formatOptions);
+
+    /**
+     * Sets the maximum number of bad records that BigQuery can ignore when running the job. If the
+     * number of bad records exceeds this value, an invalid error is returned in the job result.
+     * By default no bad record is ignored.
+     */
+    Builder maxBadRecords(Integer maxBadRecords);
+
+    /**
+     * Sets the schema for the destination table. The schema can be omitted if the destination
+     * table already exists, or if you're loading data from a Google Cloud Datastore backup (i.e.
+     * {@code DATASTORE_BACKUP} format option).
+     */
+    Builder schema(Schema schema);
+
+    /**
+     * Sets whether BigQuery should allow extra values that are not represented in the table
+     * schema. If {@code true}, the extra values are ignored. If {@code false}, records with extra
+     * columns are treated as bad records, and if there are too many bad records, an invalid error
+     * is returned in the job result. By default unknown values are not allowed.
+     */
+    Builder ignoreUnknownValues(Boolean ignoreUnknownValues);
+
+    /**
+     * Sets which entity properties to load into BigQuery from a Cloud Datastore backup. This field
+     * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are
+     * case sensitive and must be top-level properties. If no properties are specified, BigQuery
+     * loads all properties. If any named property isn't found in the Cloud Datastore backup, an
+     * invalid error is returned in the job result.
+     */
+    Builder projectionFields(List<String> projectionFields);
+
+    LoadConfiguration build();
+  }
+
+  /**
+   * Returns the destination table to load the data into.
+   */
+  TableId destinationTable();
+
+  /**
+   * Returns whether the job is allowed to create new tables.
+   *
+   * @see Create Disposition
+   */
+  CreateDisposition createDisposition();
+
+  /**
+   * Returns the action that should occur if the destination table already exists.
+   *
+   * @see Write Disposition
+   */
+  WriteDisposition writeDisposition();
+
+  /**
+   * Returns additional properties used to parse CSV data (used when {@link #format()} is set
+   * to CSV). Returns {@code null} if not set.
+   */
+  CsvOptions csvOptions();
+
+  /**
+   * Returns the maximum number of bad records that BigQuery can ignore when running the job. If
+   * the number of bad records exceeds this value, an invalid error is returned in the job result.
+   * By default no bad record is ignored.
+   */
+  Integer maxBadRecords();
+
+  /**
+   * Returns the schema for the destination table, if set. Returns {@code null} otherwise.
+   */
+  Schema schema();
+
+  /**
+   * Returns the format of the data files.
+   */
+  String format();
+
+  /**
+   * Returns whether BigQuery should allow extra values that are not represented in the table
+   * schema. If {@code true}, the extra values are ignored. If {@code false}, records with extra
+   * columns are treated as bad records, and if there are too many bad records, an invalid error
+   * is returned in the job result.
+   * By default unknown values are not allowed.
+   */
+  Boolean ignoreUnknownValues();
+
+  /**
+   * Returns which entity properties to load into BigQuery from a Cloud Datastore backup. This
+   * field is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names
+   * are case sensitive and must be top-level properties. If no properties are specified, BigQuery
+   * loads all properties. If any named property isn't found in the Cloud Datastore backup, an
+   * invalid error is returned in the job result.
+   */
+  List<String> projectionFields();
+
+  /**
+   * Returns a builder for the load configuration object.
+   */
+  Builder toBuilder();
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java
new file mode 100644
index 000000000000..1f98a3dfaca1
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/LoadJobConfiguration.java
@@ -0,0 +1,389 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.JobConfigurationLoad;
+import com.google.common.base.MoreObjects.ToStringHelper;
+import com.google.common.collect.ImmutableList;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Google BigQuery load job configuration. A load job loads data from one of several formats into a
+ * table. Data is provided as URIs that point to objects in Google Cloud Storage. Load job
+ * configurations have {@link JobConfiguration.Type#LOAD} type.
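+ *
+ * <p>A construction sketch (the table id and URI are placeholders, and {@code FormatOptions.csv()}
+ * is assumed to be available as a CSV factory):
+ * <pre> {@code
+ * LoadJobConfiguration configuration = LoadJobConfiguration.builder(
+ *         TableId.of("dataset", "table"), "gs://bucket/path/file.csv")
+ *     .formatOptions(FormatOptions.csv())
+ *     .build();
+ * JobInfo jobInfo = JobInfo.of(configuration);
+ * } </pre>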
+ */
+public final class LoadJobConfiguration extends JobConfiguration implements LoadConfiguration {
+
+  private static final long serialVersionUID = -2673554846792429829L;
+
+  private final List<String> sourceUris;
+  private final TableId destinationTable;
+  private final JobInfo.CreateDisposition createDisposition;
+  private final JobInfo.WriteDisposition writeDisposition;
+  private final FormatOptions formatOptions;
+  private final Integer maxBadRecords;
+  private final Schema schema;
+  private final Boolean ignoreUnknownValues;
+  private final List<String> projectionFields;
+
+  public static final class Builder
+      extends JobConfiguration.Builder<LoadJobConfiguration, Builder>
+      implements LoadConfiguration.Builder {
+
+    private List<String> sourceUris;
+    private TableId destinationTable;
+    private JobInfo.CreateDisposition createDisposition;
+    private JobInfo.WriteDisposition writeDisposition;
+    private FormatOptions formatOptions;
+    private Integer maxBadRecords;
+    private Schema schema;
+    private Boolean ignoreUnknownValues;
+    private List<String> projectionFields;
+
+    private Builder() {
+      super(Type.LOAD);
+    }
+
+    private Builder(LoadJobConfiguration loadConfiguration) {
+      this();
+      this.destinationTable = loadConfiguration.destinationTable;
+      this.createDisposition = loadConfiguration.createDisposition;
+      this.writeDisposition = loadConfiguration.writeDisposition;
+      this.formatOptions = loadConfiguration.formatOptions;
+      this.maxBadRecords = loadConfiguration.maxBadRecords;
+      this.schema = loadConfiguration.schema;
+      this.ignoreUnknownValues = loadConfiguration.ignoreUnknownValues;
+      this.projectionFields = loadConfiguration.projectionFields;
+      this.sourceUris = loadConfiguration.sourceUris;
+    }
+
+    private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) {
+      this();
+      JobConfigurationLoad loadConfigurationPb = configurationPb.getLoad();
+      this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable());
+      if (loadConfigurationPb.getCreateDisposition() != null) {
+        this.createDisposition =
+            JobInfo.CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition());
+      }
+      if (loadConfigurationPb.getWriteDisposition() != null) {
+        this.writeDisposition =
+            JobInfo.WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition());
+      }
+      if (loadConfigurationPb.getSourceFormat() != null) {
+        this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat());
+      }
+      if (loadConfigurationPb.getAllowJaggedRows() != null
+          || loadConfigurationPb.getAllowQuotedNewlines() != null
+          || loadConfigurationPb.getEncoding() != null
+          || loadConfigurationPb.getFieldDelimiter() != null
+          || loadConfigurationPb.getQuote() != null
+          || loadConfigurationPb.getSkipLeadingRows() != null) {
+        CsvOptions.Builder builder = CsvOptions.builder()
+            .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows())
+            .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines())
+            .encoding(loadConfigurationPb.getEncoding())
+            .fieldDelimiter(loadConfigurationPb.getFieldDelimiter())
+            .quote(loadConfigurationPb.getQuote())
+            .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows());
+        this.formatOptions = builder.build();
+      }
+      this.maxBadRecords = loadConfigurationPb.getMaxBadRecords();
+      if (loadConfigurationPb.getSchema() != null) {
+        this.schema = Schema.fromPb(loadConfigurationPb.getSchema());
+      }
+      this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues();
+      this.projectionFields = loadConfigurationPb.getProjectionFields();
+      if (loadConfigurationPb.getSourceUris() != null) {
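+        // Defensive copy into an immutable list, so this configuration stays immutable even if
+        // the underlying service-model list is modified later.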
+        this.sourceUris = ImmutableList.copyOf(loadConfigurationPb.getSourceUris());
+      }
+    }
+
+    @Override
+    public Builder destinationTable(TableId destinationTable) {
+      this.destinationTable = destinationTable;
+      return this;
+    }
+
+    @Override
+    public Builder createDisposition(JobInfo.CreateDisposition createDisposition) {
+      this.createDisposition = createDisposition;
+      return this;
+    }
+
+    @Override
+    public Builder writeDisposition(JobInfo.WriteDisposition writeDisposition) {
+      this.writeDisposition = writeDisposition;
+      return this;
+    }
+
+    @Override
+    public Builder formatOptions(FormatOptions formatOptions) {
+      this.formatOptions = formatOptions;
+      return this;
+    }
+
+    @Override
+    public Builder maxBadRecords(Integer maxBadRecords) {
+      this.maxBadRecords = maxBadRecords;
+      return this;
+    }
+
+    @Override
+    public Builder schema(Schema schema) {
+      this.schema = schema;
+      return this;
+    }
+
+    @Override
+    public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) {
+      this.ignoreUnknownValues = ignoreUnknownValues;
+      return this;
+    }
+
+    @Override
+    public Builder projectionFields(List<String> projectionFields) {
+      this.projectionFields =
+          projectionFields != null ? ImmutableList.copyOf(projectionFields) : null;
+      return this;
+    }
+
+    /**
+     * Sets the fully-qualified URIs that point to source data in Google Cloud Storage (e.g.
+     * gs://bucket/path). Each URI can contain one '*' wildcard character and it must come after
+     * the 'bucket' name.
+     */
+    public Builder sourceUris(List<String> sourceUris) {
+      this.sourceUris = ImmutableList.copyOf(checkNotNull(sourceUris));
+      return this;
+    }
+
+    @Override
+    public LoadJobConfiguration build() {
+      return new LoadJobConfiguration(this);
+    }
+  }
+
+  private LoadJobConfiguration(Builder builder) {
+    super(builder);
+    this.sourceUris = builder.sourceUris;
+    this.destinationTable = builder.destinationTable;
+    this.createDisposition = builder.createDisposition;
+    this.writeDisposition = builder.writeDisposition;
+    this.formatOptions = builder.formatOptions;
+    this.maxBadRecords = builder.maxBadRecords;
+    this.schema = builder.schema;
+    this.ignoreUnknownValues = builder.ignoreUnknownValues;
+    this.projectionFields = builder.projectionFields;
+  }
+
+  @Override
+  public TableId destinationTable() {
+    return destinationTable;
+  }
+
+  @Override
+  public JobInfo.CreateDisposition createDisposition() {
+    return this.createDisposition;
+  }
+
+  @Override
+  public JobInfo.WriteDisposition writeDisposition() {
+    return writeDisposition;
+  }
+
+  @Override
+  public CsvOptions csvOptions() {
+    return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null;
+  }
+
+  @Override
+  public Integer maxBadRecords() {
+    return maxBadRecords;
+  }
+
+  @Override
+  public Schema schema() {
+    return schema;
+  }
+
+  @Override
+  public String format() {
+    return formatOptions != null ? formatOptions.type() : null;
+  }
+
+  @Override
+  public Boolean ignoreUnknownValues() {
+    return ignoreUnknownValues;
+  }
+
+  @Override
+  public List<String> projectionFields() {
+    return projectionFields;
+  }
+
+  /**
+   * Returns the fully-qualified URIs that point to source data in Google Cloud Storage (e.g.
+   * gs://bucket/path). Each URI can contain one '*' wildcard character and it must come after the
+   * 'bucket' name.
+   */
+  public List<String> sourceUris() {
+    return sourceUris;
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  @Override
+  ToStringHelper toStringHelper() {
+    return super.toStringHelper()
+        .add("destinationTable", destinationTable)
+        .add("createDisposition", createDisposition)
+        .add("writeDisposition", writeDisposition)
+        .add("formatOptions", formatOptions)
+        .add("maxBadRecords", maxBadRecords)
+        .add("schema", schema)
+        .add("ignoreUnknownValue", ignoreUnknownValues)
+        .add("projectionFields", projectionFields)
+        .add("sourceUris", sourceUris);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof LoadJobConfiguration && baseEquals((LoadJobConfiguration) obj);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(baseHashCode(), sourceUris);
+  }
+
+  @Override
+  LoadJobConfiguration setProjectId(String projectId) {
+    return toBuilder().destinationTable(destinationTable().setProjectId(projectId)).build();
+  }
+
+  com.google.api.services.bigquery.model.JobConfiguration toPb() {
+    JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad();
+    loadConfigurationPb.setDestinationTable(destinationTable.toPb());
+    if (createDisposition != null) {
+      loadConfigurationPb.setCreateDisposition(createDisposition.toString());
+    }
+    if (writeDisposition != null) {
+      loadConfigurationPb.setWriteDisposition(writeDisposition.toString());
+    }
+    if (csvOptions() != null) {
+      CsvOptions csvOptions = csvOptions();
+      loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter())
+          .setAllowJaggedRows(csvOptions.allowJaggedRows())
+          .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines())
+          .setEncoding(csvOptions.encoding())
+          .setQuote(csvOptions.quote())
+          .setSkipLeadingRows(csvOptions.skipLeadingRows());
+    }
+    if (schema != null) {
+      loadConfigurationPb.setSchema(schema.toPb());
+    }
+    if (formatOptions != null) {
+      loadConfigurationPb.setSourceFormat(formatOptions.type());
+    }
+    loadConfigurationPb.setMaxBadRecords(maxBadRecords);
+    loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues);
+    loadConfigurationPb.setProjectionFields(projectionFields);
+    if (sourceUris != null) {
+      loadConfigurationPb.setSourceUris(ImmutableList.copyOf(sourceUris));
+    }
+    return new com.google.api.services.bigquery.model.JobConfiguration()
+        .setLoad(loadConfigurationPb);
+  }
+
+  /**
+   * Creates a builder for a BigQuery Load Job configuration given the destination table and source
+   * URIs.
+   */
+  public static Builder builder(TableId destinationTable, List<String> sourceUris) {
+    return new Builder().destinationTable(destinationTable).sourceUris(sourceUris);
+  }
+
+  /**
+   * Creates a builder for a BigQuery Load Job configuration given the destination table and source
+   * URI.
+   */
+  public static Builder builder(TableId destinationTable, String sourceUri) {
+    return builder(destinationTable, ImmutableList.of(sourceUri));
+  }
+
+  /**
+   * Creates a builder for a BigQuery Load Job configuration given the destination table, format
+   * and source URIs.
+   */
+  public static Builder builder(TableId destinationTable, List<String> sourceUris,
+      FormatOptions format) {
+    return builder(destinationTable, sourceUris).formatOptions(format);
+  }
+
+  /**
+   * Creates a builder for a BigQuery Load Job configuration given the destination table, format
+   * and source URI.
+   */
+  public static Builder builder(TableId destinationTable, String sourceUri, FormatOptions format) {
+    return builder(destinationTable, ImmutableList.of(sourceUri), format);
+  }
+
+  /**
+   * Returns a BigQuery Load Job Configuration for the given destination table and source URIs.
+   */
+  public static LoadJobConfiguration of(TableId destinationTable, List<String> sourceUris) {
+    return builder(destinationTable, sourceUris).build();
+  }
+
+  /**
+   * Returns a BigQuery Load Job Configuration for the given destination table and source URI.
+   */
+  public static LoadJobConfiguration of(TableId destinationTable, String sourceUri) {
+    return of(destinationTable, ImmutableList.of(sourceUri));
+  }
+
+  /**
+   * Returns a BigQuery Load Job Configuration for the given destination table, format and source
+   * URIs.
+   */
+  public static LoadJobConfiguration of(TableId destinationTable, List<String> sourceUris,
+      FormatOptions format) {
+    return builder(destinationTable, sourceUris, format).build();
+  }
+
+  /**
+   * Returns a BigQuery Load Job Configuration for the given destination table, format and source
+   * URI.
+   */
+  public static LoadJobConfiguration of(TableId destinationTable, String sourceUri,
+      FormatOptions format) {
+    return of(destinationTable, ImmutableList.of(sourceUri), format);
+  }
+
+  @SuppressWarnings("unchecked")
+  static LoadJobConfiguration fromPb(
+      com.google.api.services.bigquery.model.JobConfiguration confPb) {
+    return new Builder(confPb).build();
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java
new file mode 100644
index 000000000000..d88820fe5a29
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.common.base.MoreObjects;
+import com.google.gcloud.spi.BigQueryRpc;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Base class for a BigQuery operation option.
+ */
+class Option implements Serializable {
+
+  private static final long serialVersionUID = -6647817677804099207L;
+
+  private final BigQueryRpc.Option rpcOption;
+  private final Object value;
+
+  Option(BigQueryRpc.Option rpcOption, Object value) {
+    this.rpcOption = checkNotNull(rpcOption);
+    this.value = value;
+  }
+
+  BigQueryRpc.Option rpcOption() {
+    return rpcOption;
+  }
+
+  Object value() {
+    return value;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (!(obj instanceof Option)) {
+      return false;
+    }
+    Option other = (Option) obj;
+    return Objects.equals(rpcOption, other.rpcOption)
+        && Objects.equals(value, other.value);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(rpcOption, value);
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("name", rpcOption.value())
+        .add("value", value)
+        .toString();
+  }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java
new file mode 100644
index 000000000000..630a3d5b9088
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobConfiguration.java
@@ -0,0 +1,536 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.JobConfigurationQuery;
+import com.google.common.base.MoreObjects.ToStringHelper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.gcloud.bigquery.JobInfo.CreateDisposition;
+import com.google.gcloud.bigquery.JobInfo.WriteDisposition;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Google BigQuery Query Job configuration. A Query Job runs a query against BigQuery data. Query
+ * job configurations have {@link JobConfiguration.Type#QUERY} type.
+ */
+public final class QueryJobConfiguration extends JobConfiguration {
+
+  private static final long serialVersionUID = -1108948249081804890L;
+
+  /**
+   * Priority levels for a query. If not specified the priority is assumed to be
+   * {@link Priority#INTERACTIVE}.
+   */
+  public enum Priority {
+    /**
+     * Query is executed as soon as possible and counts towards the concurrent rate limit and the
+     * daily rate limit.
+     */
+    INTERACTIVE,
+
+    /**
+     * Query is queued and started as soon as idle resources are available, usually within a few
+     * minutes. If the query hasn't started within 3 hours, its priority is changed to
+     * {@link Priority#INTERACTIVE}.
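+     *
+     * <p>For example, assuming a {@code QueryJobConfiguration.Builder} named {@code builder}:
+     * <pre> {@code
+     * builder.priority(Priority.BATCH);
+     * } </pre>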
+     */
+    BATCH
+  }
+
+  private final String query;
+  private final TableId destinationTable;
+  private final Map<String, ExternalDataConfiguration> tableDefinitions;
+  private final List<UserDefinedFunction> userDefinedFunctions;
+  private final CreateDisposition createDisposition;
+  private final WriteDisposition writeDisposition;
+  private final DatasetId defaultDataset;
+  private final Priority priority;
+  private final Boolean allowLargeResults;
+  private final Boolean useQueryCache;
+  private final Boolean flattenResults;
+  private final Boolean dryRun;
+
+  public static final class Builder
+      extends JobConfiguration.Builder<QueryJobConfiguration, Builder> {
+
+    private String query;
+    private TableId destinationTable;
+    private Map<String, ExternalDataConfiguration> tableDefinitions;
+    private List<UserDefinedFunction> userDefinedFunctions;
+    private CreateDisposition createDisposition;
+    private WriteDisposition writeDisposition;
+    private DatasetId defaultDataset;
+    private Priority priority;
+    private Boolean allowLargeResults;
+    private Boolean useQueryCache;
+    private Boolean flattenResults;
+    private Boolean dryRun;
+
+    private Builder() {
+      super(Type.QUERY);
+    }
+
+    private Builder(QueryJobConfiguration jobConfiguration) {
+      this();
+      this.query = jobConfiguration.query;
+      this.destinationTable = jobConfiguration.destinationTable;
+      this.tableDefinitions = jobConfiguration.tableDefinitions;
+      this.userDefinedFunctions = jobConfiguration.userDefinedFunctions;
+      this.createDisposition = jobConfiguration.createDisposition;
+      this.writeDisposition = jobConfiguration.writeDisposition;
+      this.defaultDataset = jobConfiguration.defaultDataset;
+      this.priority = jobConfiguration.priority;
+      this.allowLargeResults = jobConfiguration.allowLargeResults;
+      this.useQueryCache = jobConfiguration.useQueryCache;
+      this.flattenResults = jobConfiguration.flattenResults;
+      this.dryRun = jobConfiguration.dryRun;
+    }
+
+    private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) {
+      this();
+      JobConfigurationQuery queryConfigurationPb = configurationPb.getQuery();
+      this.query = queryConfigurationPb.getQuery();
+      allowLargeResults = queryConfigurationPb.getAllowLargeResults();
+      useQueryCache = queryConfigurationPb.getUseQueryCache();
+      flattenResults = queryConfigurationPb.getFlattenResults();
+      dryRun = configurationPb.getDryRun();
+      if (queryConfigurationPb.getDestinationTable() != null) {
+        destinationTable = TableId.fromPb(queryConfigurationPb.getDestinationTable());
+      }
+      if (queryConfigurationPb.getDefaultDataset() != null) {
+        defaultDataset = DatasetId.fromPb(queryConfigurationPb.getDefaultDataset());
+      }
+      if (queryConfigurationPb.getPriority() != null) {
+        priority = Priority.valueOf(queryConfigurationPb.getPriority());
+      }
+      if (queryConfigurationPb.getTableDefinitions() != null) {
+        tableDefinitions = Maps.transformValues(queryConfigurationPb.getTableDefinitions(),
+            ExternalDataConfiguration.FROM_PB_FUNCTION);
+      }
+      if (queryConfigurationPb.getUserDefinedFunctionResources() != null) {
+        userDefinedFunctions = Lists.transform(
+            queryConfigurationPb.getUserDefinedFunctionResources(),
+            UserDefinedFunction.FROM_PB_FUNCTION);
+      }
+      if (queryConfigurationPb.getCreateDisposition() != null) {
+        createDisposition =
+            CreateDisposition.valueOf(queryConfigurationPb.getCreateDisposition());
+      }
+      if (queryConfigurationPb.getWriteDisposition() != null) {
+        writeDisposition =
+            WriteDisposition.valueOf(queryConfigurationPb.getWriteDisposition());
+      }
+    }
+
+    /**
+     * Sets the BigQuery SQL query to execute.
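+     *
+     * <p>For example (the table reference in the query string is a placeholder):
+     * <pre> {@code
+     * builder.query("SELECT name FROM [dataset.table] WHERE age > 18");
+     * } </pre>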
+     */
+    public Builder query(String query) {
+      this.query = query;
+      return this;
+    }
+
+    /**
+     * Sets the table where the query results will be stored. If not provided a new table is
+     * created. This value is required if {@link Builder#allowLargeResults(Boolean)} is set to
+     * {@code true}.
+     */
+    public Builder destinationTable(TableId destinationTable) {
+      this.destinationTable = destinationTable;
+      return this;
+    }
+
+    /**
+     * Sets the external table definitions. If querying external data sources outside of BigQuery,
+     * this value describes the data format, location and other properties of the data
+     * sources. By defining these properties, the data sources can be queried as if they were
+     * standard BigQuery tables.
+     */
+    public Builder tableDefinitions(Map<String, ExternalDataConfiguration> tableDefinitions) {
+      this.tableDefinitions = tableDefinitions != null ? Maps.newHashMap(tableDefinitions) : null;
+      return this;
+    }
+
+    /**
+     * Adds a new external table definition. If a definition already exists for {@code tableName}
+     * it is updated.
+     *
+     * @param tableName name of the table
+     * @param tableDefinition external data configuration for the table used by this query
+     */
+    public Builder addTableDefinition(String tableName,
+        ExternalDataConfiguration tableDefinition) {
+      if (this.tableDefinitions == null) {
+        this.tableDefinitions = Maps.newHashMap();
+      }
+      this.tableDefinitions.put(checkNotNull(tableName), checkNotNull(tableDefinition));
+      return this;
+    }
+
+    /**
+     * Sets user defined function resources that can be used by this query. Function resources
+     * can either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from
+     * a Google Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}).
+     */
+    public Builder userDefinedFunctions(List<UserDefinedFunction> userDefinedFunctions) {
+      this.userDefinedFunctions =
+          userDefinedFunctions != null ? ImmutableList.copyOf(userDefinedFunctions) : null;
+      return this;
+    }
+
+    /**
+     * Sets whether the job is allowed to create tables.
+     *
+     * @see Create Disposition
+     */
+    public Builder createDisposition(CreateDisposition createDisposition) {
+      this.createDisposition = createDisposition;
+      return this;
+    }
+
+    /**
+     * Sets the action that should occur if the destination table already exists.
+     *
+     * @see Write Disposition
+     */
+    public Builder writeDisposition(WriteDisposition writeDisposition) {
+      this.writeDisposition = writeDisposition;
+      return this;
+    }
+
+    /**
+     * Sets the default dataset. This dataset is used for all unqualified table names used in the
+     * query.
+     */
+    public Builder defaultDataset(DatasetId defaultDataset) {
+      this.defaultDataset = defaultDataset;
+      return this;
+    }
+
+    /**
+     * Sets the default dataset. This dataset is used for all unqualified table names used in the
+     * query.
+     */
+    public Builder defaultDataset(String defaultDataset) {
+      return defaultDataset(DatasetId.of(defaultDataset));
+    }
+
+    /**
+     * Sets a priority for the query. If not specified the priority is assumed to be
+     * {@link Priority#INTERACTIVE}.
+     */
+    public Builder priority(Priority priority) {
+      this.priority = priority;
+      return this;
+    }
+
+    /**
+     * Sets whether the job is enabled to create arbitrarily large results. If {@code true}, the
+     * query is allowed to create large results at a slight cost in performance. If {@code true},
+     * {@link Builder#destinationTable(TableId)} must be provided.
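+     *
+     * <p>For example (a sketch; the destination table id is a placeholder):
+     * <pre> {@code
+     * builder.allowLargeResults(true).destinationTable(TableId.of("dataset", "table"));
+     * } </pre>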
+ * + * @see + * Returning Large Query Results + */ + public Builder allowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + return this; + } + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link Builder#destinationTable(TableId)} is not set. + * + * @see Query Caching + */ + public Builder useQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + /** + * Sets whether nested and repeated fields should be flattened. If set to {@code false} + * {@link Builder#allowLargeResults(Boolean)} must be {@code true}. By default results are + * flattened. + * + * @see Flatten + */ + public Builder flattenResults(Boolean flattenResults) { + this.flattenResults = flattenResults; + return this; + } + + /** + * Sets whether the job has to be dry run or not. If set, the job is not executed. A valid query + * will return a mostly empty response with some processing statistics, while an invalid query + * will return the same error it would if it wasn't a dry run. + */ + public Builder dryRun(Boolean dryRun) { + this.dryRun = dryRun; + return this; + } + + public QueryJobConfiguration build() { + return new QueryJobConfiguration(this); + } + } + + private QueryJobConfiguration(Builder builder) { + super(builder); + this.query = checkNotNull(builder.query); + this.allowLargeResults = builder.allowLargeResults; + this.createDisposition = builder.createDisposition; + this.defaultDataset = builder.defaultDataset; + this.destinationTable = builder.destinationTable; + this.flattenResults = builder.flattenResults; + this.priority = builder.priority; + this.useQueryCache = builder.useQueryCache; + this.userDefinedFunctions = builder.userDefinedFunctions; + this.writeDisposition = builder.writeDisposition; + this.tableDefinitions = + builder.tableDefinitions != null ? ImmutableMap.copyOf(builder.tableDefinitions) : null; + this.dryRun = builder.dryRun; + } + + /** + * Returns whether the job is enabled to create arbitrarily large results. If {@code true} + * the query is allowed to create large results at a slight cost in performance. + * the query is allowed to create large results at a slight cost in performance. + * + * @see + * Returning Large Query Results + */ + public Boolean allowLargeResults() { + return allowLargeResults; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + public CreateDisposition createDisposition() { + return createDisposition; + } + + /** + * Returns the default dataset. This dataset is used for all unqualified table names used in the + * query. + */ + public DatasetId defaultDataset() { + return defaultDataset; + } + + /** + * Returns the table where to put query results. If not provided a new table is created. This + * value is required if {@link #allowLargeResults()} is {@code true}. + */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether nested and repeated fields should be flattened. If set to {@code false} + * {@link Builder#allowLargeResults(Boolean)} must be {@code true}. + * + * @see Flatten + */ + public Boolean flattenResults() { + return flattenResults; + } + + /** + * Returns the query priority. + */ + public Priority priority() { + return priority; + } + + /** + * Returns the Google BigQuery SQL query. 
+ */ + public String query() { + return query; + } + + /** + * Returns the external tables definitions. If querying external data sources outside of BigQuery, + * this value describes the data format, location and other properties of the data + * sources. By defining these properties, the data sources can be queried as if they were + * standard BigQuery tables. + */ + public Map tableDefinitions() { + return tableDefinitions; + } + + /** + * Returns whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link Builder#destinationTable(TableId)} is not set. + * + * @see Query Caching + */ + public Boolean useQueryCache() { + return useQueryCache; + } + + /** + * Returns user defined function resources that can be used by this query. Function resources + * can either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from + * a Google Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}. + */ + public List userDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + /** + * Returns whether the job has to be dry run or not. If set, the job is not executed. A valid + * query will return a mostly empty response with some processing statistics, while an invalid + * query will return the same error it would if it wasn't a dry run. + */ + public Boolean dryRun() { + return dryRun; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("query", query) + .add("destinationTable", destinationTable) + .add("defaultDataset", defaultDataset) + .add("allowLargeResults", allowLargeResults) + .add("flattenResults", flattenResults) + .add("priority", priority) + .add("tableDefinitions", tableDefinitions) + .add("userQueryCache", useQueryCache) + .add("userDefinedFunctions", userDefinedFunctions) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("dryRun", dryRun); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryJobConfiguration && baseEquals((QueryJobConfiguration) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), allowLargeResults, createDisposition, destinationTable, + defaultDataset, flattenResults, priority, query, tableDefinitions, useQueryCache, + userDefinedFunctions, writeDisposition, dryRun); + } + + @Override + QueryJobConfiguration setProjectId(String projectId) { + Builder builder = toBuilder(); + if (destinationTable() != null) { + builder.destinationTable(destinationTable().setProjectId(projectId)); + } + if (defaultDataset() != null) { + builder.defaultDataset(defaultDataset().setProjectId(projectId)); + } + return builder.build(); + } + + com.google.api.services.bigquery.model.JobConfiguration toPb() { + com.google.api.services.bigquery.model.JobConfiguration configurationPb = + new com.google.api.services.bigquery.model.JobConfiguration(); + JobConfigurationQuery queryConfigurationPb = new JobConfigurationQuery(); + queryConfigurationPb.setQuery(query); + configurationPb.setDryRun(dryRun()); + if (allowLargeResults != null) { + 
queryConfigurationPb.setAllowLargeResults(allowLargeResults); + } + if (createDisposition != null) { + queryConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (destinationTable != null) { + queryConfigurationPb.setDestinationTable(destinationTable.toPb()); + } + if (defaultDataset != null) { + queryConfigurationPb.setDefaultDataset(defaultDataset.toPb()); + } + if (flattenResults != null) { + queryConfigurationPb.setFlattenResults(flattenResults); + } + if (priority != null) { + queryConfigurationPb.setPriority(priority.toString()); + } + if (tableDefinitions != null) { + queryConfigurationPb.setTableDefinitions( + Maps.transformValues(tableDefinitions, ExternalDataConfiguration.TO_PB_FUNCTION)); + } + if (useQueryCache != null) { + queryConfigurationPb.setUseQueryCache(useQueryCache); + } + if (userDefinedFunctions != null) { + queryConfigurationPb.setUserDefinedFunctionResources( + Lists.transform(userDefinedFunctions, UserDefinedFunction.TO_PB_FUNCTION)); + } + if (writeDisposition != null) { + queryConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + return configurationPb.setQuery(queryConfigurationPb); + } + + /** + * Creates a builder for a BigQuery Query Job given the query to be run. + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Returns a BigQuery Copy Job for the given the query to be run. Job's id is chosen by the + * service. + */ + public static QueryJobConfiguration of(String query) { + return builder(query).build(); + } + + @SuppressWarnings("unchecked") + static QueryJobConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java new file mode 100644 index 000000000000..0bcfb3d4a9ae --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java @@ -0,0 +1,318 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google Cloud BigQuery Query Request. This class can be used to run a BigQuery SQL query and + * return results if the query completes within a specified timeout. The query results are saved to + * a temporary table that is deleted approximately 24 hours after the query is run. The query is run + * through a BigQuery Job whose identity can be accessed via {@link QueryResponse#jobId()}. 
If the + * query does not complete within the provided {@link Builder#maxWaitTime(Long)}, the response + * returned by {@link BigQuery#query(QueryRequest)} will have {@link QueryResponse#jobCompleted()} + * set to {@code false} and {@link QueryResponse#result()} set to {@code null}. To obtain query + * results you can use {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} until + * {@link QueryResponse#jobCompleted()} returns {@code true}. + * + *
+ * <p>Example usage of a query request:
+ *
+ * <pre>    {@code
+ *    // Substitute "field", "table" and "dataset" with real field, table and dataset identifiers
+ *    QueryRequest request = QueryRequest.builder("SELECT field FROM table")
+ *      .defaultDataset(DatasetId.of("dataset"))
+ *      .maxWaitTime(60000L)
+ *      .maxResults(1000L)
+ *      .build();
+ *    QueryResponse response = bigquery.query(request);
+ *    while (!response.jobCompleted()) {
+ *      Thread.sleep(1000);
+ *      response = bigquery.getQueryResults(response.jobId());
+ *    }
+ *    List<BigQueryError> executionErrors = response.executionErrors();
+ *    // look for errors in executionErrors
+ *    QueryResult result = response.result();
+ *    Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+ *    while (rowIterator.hasNext()) {
+ *      List<FieldValue> row = rowIterator.next();
+ *      // do something with row
+ *    }
+ * }</pre>
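+ *
+ * <p>A dry run can validate the query without executing it (a minimal sketch reusing the
+ * placeholder names above):
+ * <pre>    {@code
+ *    QueryRequest dryRunRequest = QueryRequest.builder("SELECT field FROM table")
+ *      .dryRun(true)
+ *      .build();
+ * }</pre>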
+ * + * @see Query + * @see Query Reference + */ +public class QueryRequest implements Serializable { + + private static final long serialVersionUID = -8727328332415880852L; + + private final String query; + private final Long maxResults; + private final DatasetId defaultDataset; + private final Long maxWaitTime; + private final Boolean dryRun; + private final Boolean useQueryCache; + + public static final class Builder { + + private String query; + private Long maxResults; + private DatasetId defaultDataset; + private Long maxWaitTime; + private Boolean dryRun; + private Boolean useQueryCache; + + private Builder() {} + + /** + * Sets the BigQuery query to be executed. + */ + public Builder query(String query) { + this.query = checkNotNull(query); + return this; + } + + /** + * Sets the maximum number of rows of data to return per page of results. Setting this flag to a + * small value such as 1000 and then paging through results might improve reliability when the + * query result set is large. In addition to this limit, responses are also limited to 10 MB. + * By default, there is no maximum row count, and only the byte limit applies. + */ + public Builder maxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + /** + * Sets the default dataset to assume for any unqualified table names in the query. + */ + public Builder defaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + /** + * Sets the default dataset to assume for any unqualified table names in the query. + */ + public Builder defaultDataset(String defaultDataset) { + return defaultDataset(DatasetId.of(defaultDataset)); + } + + /** + * Sets how long to wait for the query to complete, in milliseconds, before the request times + * out and returns. Note that this is only a timeout for the request, not the query. If the + * query takes longer to run than the timeout value, the call returns without any results and + * with the {@link QueryResponse#jobCompleted()} set to {@code false}. If not set, a wait time + * of 10000 milliseconds (10 seconds) is used. + */ + public Builder maxWaitTime(Long maxWaitTime) { + this.maxWaitTime = maxWaitTime; + return this; + } + + /** + * Sets whether the query has to be dry run or not. If set, the query is not executed. If the + * query is valid statistics are returned on how many bytes would be processed. If the query is + * invalid an error is returned. If not set the query is executed. + */ + public Builder dryRun(Boolean dryRun) { + this.dryRun = dryRun; + return this; + } + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. If not specified the + * query cache is used. + * + * @see Query Caching + */ + public Builder useQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + public QueryRequest build() { + return new QueryRequest(this); + } + } + + private QueryRequest(Builder builder) { + query = builder.query; + maxResults = builder.maxResults; + defaultDataset = builder.defaultDataset; + maxWaitTime = builder.maxWaitTime; + dryRun = builder.dryRun; + useQueryCache = builder.useQueryCache; + } + + /** + * Sets the BigQuery query to be executed. + */ + public String query() { + return query; + } + + /** + * Returns the maximum number of rows of data to return per page of results. 
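+   * Setting a small value (such as 1000) and paging through results can improve reliability when
+   * the result set is large; in addition to this limit, responses are also limited to 10 MB.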
+ */ + public Long maxResults() { + return maxResults; + } + + /** + * Returns the default dataset to assume for any unqualified table names in the query. + */ + public DatasetId defaultDataset() { + return defaultDataset; + } + + /** + * Returns how long to wait for the query to complete, in milliseconds, before the request times + * out and returns. Note that this is only a timeout for the request, not the query. If the + * query takes longer to run than the timeout value, the call returns without any results and + * with the {@link QueryResponse#jobCompleted()} set to {@code false}. You can call + * {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} to wait for the query + * to complete and read the results. If not set, a wait time of 10000 milliseconds (10 seconds) + * is used. + */ + public Long maxWaitTime() { + return maxWaitTime; + } + + /** + * Returns whether the query has to be dry run or not. If set, the query is not executed. If the + * query is valid statistics are returned on how many bytes would be processed. If the query is + * invalid an error is returned. If not set the query is executed. + */ + public Boolean dryRun() { + return dryRun; + } + + /** + * Returns whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. If not specified the + * query cache is used. + * + * @see Query Caching + */ + public Boolean useQueryCache() { + return useQueryCache; + } + + /** + * Returns a builder for the {@code QueryRequest} object. + */ + public Builder toBuilder() { + return new Builder() + .query(query) + .maxResults(maxResults) + .defaultDataset(defaultDataset) + .maxWaitTime(maxWaitTime) + .dryRun(dryRun) + .useQueryCache(useQueryCache); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("query", query) + .add("maxResults", maxResults) + .add("defaultDataset", defaultDataset) + .add("maxWaitTime", maxWaitTime) + .add("dryRun", dryRun) + .add("useQueryCache", useQueryCache) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(query, maxResults, defaultDataset, maxWaitTime, dryRun, useQueryCache); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryRequest && Objects.equals(toPb(), ((QueryRequest) obj).toPb()); + } + + QueryRequest setProjectId(String projectId) { + Builder builder = toBuilder(); + if (defaultDataset() != null) { + builder.defaultDataset(defaultDataset().setProjectId(projectId)); + } + return builder.build(); + } + + com.google.api.services.bigquery.model.QueryRequest toPb() { + com.google.api.services.bigquery.model.QueryRequest queryRequestPb = + new com.google.api.services.bigquery.model.QueryRequest().setQuery(query); + if (maxResults != null) { + queryRequestPb.setMaxResults(maxResults); + } + if (defaultDataset != null) { + queryRequestPb.setDefaultDataset(defaultDataset.toPb()); + } + if (maxWaitTime != null) { + queryRequestPb.setTimeoutMs(maxWaitTime); + } + if (dryRun != null) { + queryRequestPb.setDryRun(dryRun); + } + if (useQueryCache != null) { + queryRequestPb.setUseQueryCache(useQueryCache); + } + return queryRequestPb; + } + + /** + * Creates a builder for a {@code QueryRequest} given the BigQuery SQL query to be executed. + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Creates a {@code QueryRequest} object given the BigQuery SQL query to be executed. 
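+   *
+   * <p>For example (a minimal sketch; the query text is a placeholder):
+   * <pre>    {@code
+   *    QueryRequest request = QueryRequest.of("SELECT field FROM table");
+   * }</pre>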
+ */ + public static QueryRequest of(String query) { + return new Builder().query(query).build(); + } + + static QueryRequest fromPb(com.google.api.services.bigquery.model.QueryRequest queryRequestPb) { + Builder builder = builder(queryRequestPb.getQuery()); + if (queryRequestPb.getMaxResults() != null) { + builder.maxResults(queryRequestPb.getMaxResults()); + } + if (queryRequestPb.getDefaultDataset() != null) { + builder.defaultDataset(DatasetId.fromPb(queryRequestPb.getDefaultDataset())); + } + if (queryRequestPb.getTimeoutMs() != null) { + builder.maxWaitTime(queryRequestPb.getTimeoutMs()); + } + if (queryRequestPb.getDryRun() != null) { + builder.dryRun(queryRequestPb.getDryRun()); + } + if (queryRequestPb.getUseQueryCache() != null) { + builder.useQueryCache(queryRequestPb.getUseQueryCache()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java new file mode 100644 index 000000000000..77386747754f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java @@ -0,0 +1,196 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google Cloud BigQuery Query Response. This class contains the results of a Query Job + * ({@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)}) or of a + * Query Request ({@link BigQuery#query(QueryRequest)}). + * + *
+ * <p>Example usage of a query response:
+ *
+ * <pre>    {@code
+ *    QueryResponse response = bigquery.query(request);
+ *    while (!response.jobCompleted()) {
+ *      Thread.sleep(1000);
+ *      response = bigquery.getQueryResults(response.jobId());
+ *    }
+ *    List<BigQueryError> executionErrors = response.executionErrors();
+ *    // look for errors in executionErrors
+ *    QueryResult result = response.result();
+ *    Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+ *    while (rowIterator.hasNext()) {
+ *      List<FieldValue> row = rowIterator.next();
+ *      // do something with row
+ *    }
+ * }</pre>
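+ *
+ * <p>Errors can be checked before reading results (a sketch reusing {@code response} from above):
+ * <pre>    {@code
+ *    if (response.hasErrors()) {
+ *      // handle response.executionErrors()
+ *    }
+ * }</pre>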
+ * + * @see Get Query + * Results + * @see Query + */ +public class QueryResponse implements Serializable { + + private static final long serialVersionUID = 3549226764825005655L; + + private final QueryResult result; + private final String etag; + private final JobId jobId; + private final boolean jobCompleted; + private final List executionErrors; + + static final class Builder { + + private QueryResult result; + private String etag; + private JobId jobId; + private boolean jobCompleted; + private List executionErrors; + + private Builder() {} + + Builder result(QueryResult result) { + this.result = result; + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + Builder jobId(JobId jobId) { + this.jobId = jobId; + return this; + } + + Builder jobCompleted(boolean jobCompleted) { + this.jobCompleted = jobCompleted; + return this; + } + + Builder executionErrors(List executionErrors) { + this.executionErrors = executionErrors; + return this; + } + + QueryResponse build() { + return new QueryResponse(this); + } + } + + private QueryResponse(Builder builder) { + this.result = builder.result; + this.etag = builder.etag; + this.jobId = builder.jobId; + this.jobCompleted = builder.jobCompleted; + this.executionErrors = builder.executionErrors != null ? builder.executionErrors + : ImmutableList.of(); + } + + /** + * Returns the result of the query. Returns {@code null} if {@link #jobCompleted()} is {@code + * false}. + */ + public QueryResult result() { + return result; + } + + /** + * Returns the hash of the {@code QueryResponse} resource or {@code null} if not set. + */ + public String etag() { + return etag; + } + + /** + * Returns the identity of the BigQuery Job that was created to run the query. This field will be + * present even if the original request timed out. + */ + public JobId jobId() { + return jobId; + } + + /** + * Returns whether the job running the query has completed or not. If {@link #result()} is not + * {@code null}, this method will always return {@code true}. If this method returns {@code false} + * {@link #result()} returns {@code null}. This method can be used to check if query execution + * completed and results are available. + */ + public boolean jobCompleted() { + return jobCompleted; + } + + /** + * Returns whether errors and warnings occurred during the execution of the job. If this method + * returns {@code true} it does not necessarily mean that the job has completed or was + * unsuccessful. + */ + public boolean hasErrors() { + return !executionErrors.isEmpty(); + } + + /** + * Returns errors and warnings encountered during the running of the job, if any. Errors here do + * not necessarily mean that the job has completed or was unsuccessful. 
+ */ + public List executionErrors() { + return executionErrors; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("result", result) + .add("etag", etag) + .add("jobId", jobId) + .add("jobCompleted", jobCompleted) + .add("executionErrors", executionErrors) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryResponse response = (QueryResponse) obj; + return jobCompleted == response.jobCompleted + && Objects.equals(etag, response.etag) + && Objects.equals(result, response.result) + && Objects.equals(jobId, response.jobId) + && Objects.equals(executionErrors, response.executionErrors); + } + + static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java new file mode 100644 index 000000000000..692abab937a9 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java @@ -0,0 +1,176 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.PageImpl; + +import java.util.List; +import java.util.Objects; + +public class QueryResult extends PageImpl> { + + private static final long serialVersionUID = -4831062717210349818L; + + private final boolean cacheHit; + private final Schema schema; + private final long totalRows; + private final long totalBytesProcessed; + + interface QueryResultsPageFetcher extends PageImpl.NextPageFetcher> { + @Override + QueryResult nextPage(); + } + + static final class Builder { + + private QueryResultsPageFetcher pageFetcher; + private String cursor; + private Iterable> results; + private boolean cacheHit; + private Schema schema; + private long totalRows; + private long totalBytesProcessed; + + private Builder() {} + + Builder cacheHit(boolean cacheHit) { + this.cacheHit = cacheHit; + return this; + } + + Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + Builder totalBytesProcessed(long totalBytesProcessed) { + this.totalBytesProcessed = totalBytesProcessed; + return this; + } + + Builder totalRows(long totalRows) { + this.totalRows = totalRows; + return this; + } + + Builder pageFetcher(QueryResultsPageFetcher pageFetcher) { + this.pageFetcher = pageFetcher; + return this; + } + + Builder cursor(String cursor) { + this.cursor = cursor; + return this; + } + + Builder results(Iterable> results) { + this.results = results; + return this; + } + + QueryResult build() { + return new QueryResult(this); + } + } + + private QueryResult(Builder builder) { + super(builder.pageFetcher, builder.cursor, builder.results != null ? builder.results + : ImmutableList.>of()); + this.cacheHit = builder.cacheHit; + this.schema = builder.schema; + this.totalBytesProcessed = builder.totalBytesProcessed; + this.totalRows = builder.totalRows; + } + + /** + * Returns whether the query result was fetched from the query cache. + * + * @see Query Caching + */ + public boolean cacheHit() { + return cacheHit; + } + + /** + * Returns the schema of the results. This is present only when the query completes successfully. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the total number of bytes processed for the query. If this query was a dry run, this is + * the number of bytes that would be processed if the query were run. + */ + public long totalBytesProcessed() { + return totalBytesProcessed; + } + + /** + * Returns the total number of rows in the complete query result set, which can be more than the + * number of rows in the first page of results returned by {@link #values()}. Returns {@code 0} + * if the query was a dry run. 
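+   *
+   * <p>To read rows beyond the current page, iterate through all pages (a minimal sketch that
+   * assumes a {@code result} instance):
+   * <pre>    {@code
+   *    Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+   *    while (rowIterator.hasNext()) {
+   *      List<FieldValue> row = rowIterator.next();
+   *      // do something with row
+   *    }
+   * }</pre>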
+ */ + public long totalRows() { + return totalRows; + } + + @Override + public QueryResult nextPage() { + return (QueryResult) super.nextPage(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("rows", values()) + .add("cacheHit", cacheHit) + .add("schema", schema) + .add("totalBytesProcessed", totalBytesProcessed) + .add("totalRows", totalRows) + .add("cursor", nextPageCursor()) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), cacheHit, schema, totalBytesProcessed, totalRows); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryResult response = (QueryResult) obj; + return Objects.equals(nextPageCursor(), response.nextPageCursor()) + && Objects.equals(values(), response.values()) + && Objects.equals(schema, response.schema) + && totalRows == response.totalRows + && totalBytesProcessed == response.totalBytesProcessed + && cacheHit == response.cacheHit; + } + + static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java new file mode 100644 index 000000000000..8c9f91fd39f3 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryStage.java @@ -0,0 +1,444 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.ExplainQueryStage; +import com.google.api.services.bigquery.model.ExplainQueryStep; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * BigQuery provides diagnostic information about a completed query's execution plan (or query plan + * for short). The query plan describes a query as a series of stages, with each stage comprising a + * number of steps that read from data sources, perform a series of transformations on the input, + * and emit an output to a future stage (or the final result). This class contains information on a + * query stage. + * + * @see Query Plan + */ +public class QueryStage implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public QueryStage apply(ExplainQueryStage pb) { + return QueryStage.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public ExplainQueryStage apply(QueryStage stage) { + return stage.toPb(); + } + }; + private static final long serialVersionUID = -472281297327952320L; + + /** + * Each query stage is made of a number of steps. 
This class contains information on a query step. + * + * @see Steps + * Metadata + */ + public static class QueryStep implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public QueryStep apply(ExplainQueryStep pb) { + return QueryStep.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public ExplainQueryStep apply(QueryStep stage) { + return stage.toPb(); + } + }; + private static final long serialVersionUID = 8663444604771794411L; + + private final String name; + private final List substeps; + + QueryStep(String name, List substeps) { + this.name = name; + this.substeps = substeps; + } + + /** + * Returns a machine-readable name for the operation. + * + * @see Steps + * Metadata + */ + public String name() { + return name; + } + + /** + * Returns a list of human-readable stage descriptions. + */ + public List substeps() { + return substeps; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", name) + .add("substeps", substeps) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(name, substeps); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof QueryStep)) { + return false; + } + QueryStep other = (QueryStep) obj; + return Objects.equals(name, other.name) && Objects.equals(substeps, other.substeps); + } + + ExplainQueryStep toPb() { + return new ExplainQueryStep().setKind(name).setSubsteps(substeps); + } + + static QueryStep fromPb(com.google.api.services.bigquery.model.ExplainQueryStep stepPb) { + return new QueryStep(stepPb.getKind(), ImmutableList.copyOf(stepPb.getSubsteps() != null + ? stepPb.getSubsteps() : ImmutableList.of())); + } + } + + private final double computeRatioAvg; + private final double computeRatioMax; + private final long id; + private final String name; + private final double readRatioAvg; + private final double readRatioMax; + private final long recordsRead; + private final long recordsWritten; + private final List steps; + private final double waitRatioAvg; + private final double waitRatioMax; + private final double writeRatioAvg; + private final double writeRatioMax; + + static final class Builder { + + private double computeRatioAvg; + private double computeRatioMax; + private long id; + private String name; + private double readRatioAvg; + private double readRatioMax; + private long recordsRead; + private long recordsWritten; + private List steps; + private double waitRatioAvg; + private double waitRatioMax; + private double writeRatioAvg; + private double writeRatioMax; + + private Builder() {} + + Builder computeRatioAvg(double computeRatioAvg) { + this.computeRatioAvg = computeRatioAvg; + return this; + } + + Builder computeRatioMax(double computeRatioMax) { + this.computeRatioMax = computeRatioMax; + return this; + } + + Builder id(long id) { + this.id = id; + return this; + } + + Builder name(String name) { + this.name = name; + return this; + } + + Builder readRatioAvg(double readRatioAvg) { + this.readRatioAvg = readRatioAvg; + return this; + } + + Builder readRatioMax(double readRatioMax) { + this.readRatioMax = readRatioMax; + return this; + } + + Builder recordsRead(long recordsRead) { + this.recordsRead = recordsRead; + return this; + } + + Builder recordsWritten(long recordsWritten) { + this.recordsWritten = recordsWritten; + return this; + } + + Builder steps(List steps) { + this.steps = steps; + return this; + } + + Builder waitRatioAvg(double 
waitRatioAvg) { + this.waitRatioAvg = waitRatioAvg; + return this; + } + + Builder waitRatioMax(double waitRatioMax) { + this.waitRatioMax = waitRatioMax; + return this; + } + + Builder writeRatioAvg(double writeRatioAvg) { + this.writeRatioAvg = writeRatioAvg; + return this; + } + + Builder writeRatioMax(double writeRatioMax) { + this.writeRatioMax = writeRatioMax; + return this; + } + + QueryStage build() { + return new QueryStage(this); + } + } + + QueryStage(Builder builder) { + computeRatioAvg = builder.computeRatioAvg; + computeRatioMax = builder.computeRatioMax; + id = builder.id; + name = builder.name; + readRatioAvg = builder.readRatioAvg; + readRatioMax = builder.readRatioMax; + recordsRead = builder.recordsRead; + recordsWritten = builder.recordsWritten; + steps = builder.steps; + waitRatioAvg = builder.waitRatioAvg; + waitRatioMax = builder.waitRatioMax; + writeRatioAvg = builder.writeRatioAvg; + writeRatioMax = builder.writeRatioMax; + } + + /** + * Returns the time the average worker spent CPU-bound, divided by the longest time spent by any + * worker in any segment. + */ + public double computeRatioAvg() { + return computeRatioAvg; + } + + /** + * Returns the time the slowest worker spent CPU-bound, divided by the longest time spent by any + * worker in any segment. + */ + public double computeRatioMax() { + return computeRatioMax; + } + + /** + * Returns a unique ID for the stage within its plan. + */ + public long id() { + return id; + } + + /** + * Returns a human-readable name for the stage. + */ + public String name() { + return name; + } + + /** + * Returns the time the average worker spent reading input data, divided by the longest time spent + * by any worker in any segment. + */ + public double readRatioAvg() { + return readRatioAvg; + } + + /** + * Returns the time the slowest worker spent reading input data, divided by the longest time spent + * by any worker in any segment. + */ + public double readRatioMax() { + return readRatioMax; + } + + /** + * Returns the number of rows (top-level records) read by the stage. + */ + public long recordsRead() { + return recordsRead; + } + + /** + * Returns the number of rows (top-level records) written by the stage. + */ + public long recordsWritten() { + return recordsWritten; + } + + /** + * Returns the list of steps within the stage in dependency order (approximately chronological). + */ + public List steps() { + return steps; + } + + /** + * Returns the time the average worker spent waiting to be scheduled, divided by the longest time + * spent by any worker in any segment. + */ + public double waitRatioAvg() { + return waitRatioAvg; + } + + /** + * Returns the time the slowest worker spent waiting to be scheduled, divided by the longest time + * spent by any worker in any segment. + */ + public double waitRatioMax() { + return waitRatioMax; + } + + /** + * Returns the time the average worker spent writing output data, divided by the longest time + * spent by any worker in any segment. + */ + public double writeRatioAvg() { + return writeRatioAvg; + } + + /** + * Returns the time the slowest worker spent writing output data, divided by the longest time + * spent by any worker in any segment. 
+ */ + public double writeRatioMax() { + return writeRatioMax; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("computeRatioAvg", computeRatioAvg) + .add("computeRatioMax", computeRatioMax) + .add("id", id) + .add("name", name) + .add("readRatioAvg", readRatioAvg) + .add("readRatioMax", readRatioMax) + .add("recordsRead", recordsRead) + .add("recordsWritten", recordsWritten) + .add("steps", steps) + .add("waitRatioAvg", waitRatioAvg) + .add("waitRatioMax", waitRatioMax) + .add("writeRatioAvg", writeRatioAvg) + .add("writeRatioMax", writeRatioMax) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(computeRatioAvg, computeRatioMax, id, name, readRatioAvg, readRatioMax, + recordsRead, recordsWritten, steps, waitRatioAvg, waitRatioMax, writeRatioAvg); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof QueryStage)) { + return false; + } + QueryStage other = (QueryStage) obj; + return id == other.id + && computeRatioAvg == other.computeRatioAvg + && computeRatioMax == other.computeRatioMax + && readRatioAvg == other.readRatioAvg + && readRatioMax == other.readRatioMax + && recordsRead == other.recordsRead + && recordsWritten == other.recordsWritten + && waitRatioAvg == other.waitRatioAvg + && waitRatioMax == other.waitRatioMax + && writeRatioAvg == other.writeRatioAvg + && writeRatioMax == other.writeRatioMax + && Objects.equals(steps, other.steps) + && Objects.equals(name, other.name); + } + + static Builder builder() { + return new Builder(); + } + + ExplainQueryStage toPb() { + ExplainQueryStage stagePb = new ExplainQueryStage() + .setComputeRatioAvg(computeRatioAvg) + .setComputeRatioMax(computeRatioMax) + .setId(id) + .setName(name) + .setReadRatioAvg(readRatioAvg) + .setReadRatioMax(readRatioMax) + .setRecordsRead(recordsRead) + .setRecordsWritten(recordsWritten) + .setWaitRatioAvg(waitRatioAvg) + .setWaitRatioMax(waitRatioMax) + .setWriteRatioAvg(writeRatioAvg) + .setWriteRatioMax(writeRatioMax); + if (steps != null) { + stagePb.setSteps(Lists.transform(steps, QueryStep.TO_PB_FUNCTION)); + } + return stagePb; + } + + static QueryStage fromPb(com.google.api.services.bigquery.model.ExplainQueryStage stagePb) { + Builder builder = new QueryStage.Builder(); + builder.computeRatioAvg(stagePb.getComputeRatioAvg()); + builder.computeRatioMax(stagePb.getComputeRatioMax()); + builder.id(stagePb.getId()); + builder.name(stagePb.getName()); + builder.readRatioAvg(stagePb.getReadRatioAvg()); + builder.readRatioMax(stagePb.getReadRatioMax()); + builder.recordsRead(stagePb.getRecordsRead()); + builder.recordsWritten(stagePb.getRecordsWritten()); + if (stagePb.getSteps() != null) { + builder.steps(Lists.transform(stagePb.getSteps(), QueryStep.FROM_PB_FUNCTION)); + } + builder.waitRatioAvg(stagePb.getWaitRatioAvg()); + builder.waitRatioMax(stagePb.getWaitRatioMax()); + builder.writeRatioAvg(stagePb.getWriteRatioAvg()); + builder.writeRatioMax(stagePb.getWriteRatioMax()); + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java new file mode 100644 index 000000000000..787bb0d7f35f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java @@ -0,0 +1,159 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * This class represents the schema for a Google BigQuery Table or data source. + */ +public class Schema implements Serializable { + + static final Function + FROM_PB_FUNCTION = new Function() { + @Override + public Schema apply(com.google.api.services.bigquery.model.TableSchema pb) { + return Schema.fromPb(pb); + } + }; + static final Function + TO_PB_FUNCTION = new Function() { + @Override + public com.google.api.services.bigquery.model.TableSchema apply(Schema schema) { + return schema.toPb(); + } + }; + + private static final long serialVersionUID = 2007400596384553696L; + + private final List fields; + + public static final class Builder { + + private List fields; + + private Builder() {} + + /** + * Adds a field's schema to the table's schema. + */ + public Builder addField(Field field) { + if (fields == null) { + fields = Lists.newArrayList(); + } + fields.add(checkNotNull(field)); + return this; + } + + /** + * Sets table's schema fields. + */ + public Builder fields(Iterable fields) { + this.fields = Lists.newArrayList(checkNotNull(fields)); + return this; + } + + /** + * Sets table's schema fields. + */ + public Builder fields(Field... fields) { + this.fields = Lists.newArrayList(fields); + return this; + } + + /** + * Creates an {@code Schema} object. + */ + public Schema build() { + return new Schema(this); + } + } + + private Schema(Builder builder) { + this.fields = builder.fields != null ? ImmutableList.copyOf(builder.fields) + : ImmutableList.of(); + } + + /** + * Returns the fields in the current table schema. + */ + public List fields() { + return fields; + } + + /** + * Returns a builder for the {@code Schema} object. 
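+   *
+   * <p>For example, to add a field to an existing schema (a sketch; {@code newField} is a
+   * hypothetical {@code Field}):
+   * <pre>    {@code
+   *    Schema updated = schema.toBuilder().addField(newField).build();
+   * }</pre>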
+ */ + public Builder toBuilder() { + return builder().fields(fields); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("fields", fields) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(fields); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Schema && Objects.equals(toPb(), ((Schema) obj).toPb()); + } + + com.google.api.services.bigquery.model.TableSchema toPb() { + com.google.api.services.bigquery.model.TableSchema tableSchemaPb = + new com.google.api.services.bigquery.model.TableSchema(); + if (fields != null) { + List fieldsPb = Lists.transform(fields, Field.TO_PB_FUNCTION); + tableSchemaPb.setFields(fieldsPb); + } + return tableSchemaPb; + } + + public static Builder builder() { + return new Builder(); + } + + public static Schema of(Iterable fields) { + return builder().fields(fields).build(); + } + + public static Schema of(Field... fields) { + return builder().fields(fields).build(); + } + + static Schema fromPb(com.google.api.services.bigquery.model.TableSchema tableSchemaPb) { + return Schema.of(Lists.transform(tableSchemaPb.getFields(), Field.FROM_PB_FUNCTION)); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java new file mode 100644 index 000000000000..1344b31c9b68 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Table.java @@ -0,0 +1,276 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.Page; + +import java.util.List; +import java.util.Objects; + +/** + * A Google BigQuery Table. + * + *
+ * <p>Objects of this class are immutable. Operations that modify the table like {@link #update}
+ * return a new object. To get a {@code Table} object with the most recent information use
+ * {@link #reload}.
+ * </p>
+ */ +public final class Table { + + private final BigQuery bigquery; + private final BaseTableInfo info; + + /** + * Constructs a {@code Table} object for the provided {@code TableInfo}. The BigQuery service + * is used to issue requests. + * + * @param bigquery the BigQuery service used for issuing requests + * @param info table's info + */ + public Table(BigQuery bigquery, BaseTableInfo info) { + this.bigquery = checkNotNull(bigquery); + this.info = checkNotNull(info); + } + + /** + * Creates a {@code Table} object for the provided dataset and table's user-defined ids. Performs + * an RPC call to get the latest table information. + * + * @param bigquery the BigQuery service used for issuing requests + * @param dataset the dataset's user-defined id + * @param table the table's user-defined id + * @param options table options + * @return the {@code Table} object or {@code null} if not found + * @throws BigQueryException upon failure + */ + public static Table get(BigQuery bigquery, String dataset, String table, + BigQuery.TableOption... options) { + return get(bigquery, TableId.of(dataset, table), options); + } + + /** + * Creates a {@code Table} object for the provided table identity. Performs an RPC call to get the + * latest table information. + * + * @param bigquery the BigQuery service used for issuing requests + * @param table the table's identity + * @param options table options + * @return the {@code Table} object or {@code null} if not found + * @throws BigQueryException upon failure + */ + public static Table get(BigQuery bigquery, TableId table, BigQuery.TableOption... options) { + BaseTableInfo info = bigquery.getTable(table, options); + return info != null ? new Table(bigquery, info) : null; + } + + /** + * Returns the table's information. + */ + public BaseTableInfo info() { + return info; + } + + /** + * Checks if this table exists. + * + * @return {@code true} if this table exists, {@code false} otherwise + * @throws BigQueryException upon failure + */ + public boolean exists() { + return bigquery.getTable(info.tableId(), BigQuery.TableOption.fields()) != null; + } + + /** + * Fetches current table's latest information. Returns {@code null} if the table does not exist. + * + * @param options table options + * @return a {@code Table} object with latest information or {@code null} if not found + * @throws BigQueryException upon failure + */ + public Table reload(BigQuery.TableOption... options) { + return Table.get(bigquery, info.tableId(), options); + } + + /** + * Updates the table's information. Dataset's and table's user-defined ids cannot be changed. A + * new {@code Table} object is returned. + * + * @param tableInfo new table's information. Dataset's and table's user-defined ids must match the + * ones of the current table + * @param options dataset options + * @return a {@code Table} object with updated information + * @throws BigQueryException upon failure + */ + public Table update(BaseTableInfo tableInfo, BigQuery.TableOption... options) { + checkArgument(Objects.equals(tableInfo.tableId().dataset(), + info.tableId().dataset()), "Dataset's user-defined ids must match"); + checkArgument(Objects.equals(tableInfo.tableId().table(), + info.tableId().table()), "Table's user-defined ids must match"); + return new Table(bigquery, bigquery.update(tableInfo, options)); + } + + /** + * Deletes this table. 
+ * + * @return {@code true} if table was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + public boolean delete() { + return bigquery.delete(info.tableId()); + } + + /** + * Insert rows into the table. + * + * @param rows rows to be inserted + * @throws BigQueryException upon failure + */ + InsertAllResponse insert(Iterable rows) throws BigQueryException { + return bigquery.insertAll(InsertAllRequest.of(info.tableId(), rows)); + } + + /** + * Insert rows into the table. + * + * @param rows rows to be inserted + * @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set + * the entire insert operation will fail if rows to be inserted contain an invalid row + * @param ignoreUnknownValues whether to accept rows that contain values that do not match the + * schema. The unknown values are ignored. If not set, rows with unknown values are considered + * to be invalid + * @throws BigQueryException upon failure + */ + InsertAllResponse insert(Iterable rows, boolean skipInvalidRows, + boolean ignoreUnknownValues) throws BigQueryException { + InsertAllRequest request = InsertAllRequest.builder(info.tableId(), rows) + .skipInvalidRows(skipInvalidRows) + .ignoreUnknownValues(ignoreUnknownValues) + .build(); + return bigquery.insertAll(request); + } + + /** + * Returns the paginated list rows in this table. + * + * @param options table data list options + * @throws BigQueryException upon failure + */ + Page> list(BigQuery.TableDataListOption... options) throws BigQueryException { + return bigquery.listTableData(info.tableId(), options); + } + + /** + * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the + * started {@link Job} object. + * + * @param destinationDataset the user-defined id of the destination dataset + * @param destinationTable the user-defined id of the destination table + * @param options job options + * @throws BigQueryException upon failure + */ + Job copy(String destinationDataset, String destinationTable, BigQuery.JobOption... options) + throws BigQueryException { + return copy(TableId.of(destinationDataset, destinationTable), options); + } + + /** + * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the + * started {@link Job} object. + * + * @param destinationTable the destination table of the copy job + * @param options job options + * @throws BigQueryException upon failure + */ + Job copy(TableId destinationTable, BigQuery.JobOption... options) throws BigQueryException { + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, info.tableId()); + return new Job(bigquery, bigquery.create(JobInfo.of(configuration), options)); + } + + /** + * Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the + * started {@link Job} object. + * + * @param format the format of the extracted data + * @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) + * where the extracted table should be written + * @param options job options + * @throws BigQueryException upon failure + */ + Job extract(String format, String destinationUri, BigQuery.JobOption... options) + throws BigQueryException { + return extract(format, ImmutableList.of(destinationUri), options); + } + + /** + * Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns + * the started {@link Job} object. 
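+   *
+   * <p>A sketch of the call (the bucket paths are placeholders; note this helper is
+   * package-scoped in this class):
+   * <pre>    {@code
+   *    Job job = table.extract("CSV",
+   *        ImmutableList.of("gs://bucket/file1.csv", "gs://bucket/file2.csv"));
+   * }</pre>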
+ * + * @param format the format of the exported data + * @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) + * where the extracted table should be written + * @param options job options + * @throws BigQueryException upon failure + */ + Job extract(String format, List destinationUris, BigQuery.JobOption... options) + throws BigQueryException { + ExtractJobConfiguration extractConfiguration = + ExtractJobConfiguration.of(info.tableId(), destinationUris, format); + return new Job(bigquery, bigquery.create(JobInfo.of(extractConfiguration), options)); + } + + /** + * Starts a BigQuery Job to load data into the current table from the provided source URI. Returns + * the started {@link Job} object. + * + * @param format the format of the data to load + * @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from + * which to load the data + * @param options job options + * @throws BigQueryException upon failure + */ + Job load(FormatOptions format, String sourceUri, BigQuery.JobOption... options) + throws BigQueryException { + return load(format, ImmutableList.of(sourceUri), options); + } + + /** + * Starts a BigQuery Job to load data into the current table from the provided source URIs. + * Returns the started {@link Job} object. + * + * @param format the format of the exported data + * @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from + * which to load the data + * @param options job options + * @throws BigQueryException upon failure + */ + Job load(FormatOptions format, List sourceUris, BigQuery.JobOption... options) + throws BigQueryException { + LoadJobConfiguration loadConfig = LoadJobConfiguration.of(info.tableId(), sourceUris, format); + return new Job(bigquery, bigquery.create(JobInfo.of(loadConfig), options)); + } + + /** + * Returns the table's {@code BigQuery} object used to issue requests. + */ + public BigQuery bigquery() { + return bigquery; + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java new file mode 100644 index 000000000000..bee0340a29a8 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableDataWriteChannel.java @@ -0,0 +1,94 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.gcloud.RetryHelper.runWithRetries; +import static java.util.concurrent.Executors.callable; + +import com.google.gcloud.BaseWriteChannel; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.WriteChannel; + +/** + * WriteChannel implementation to stream data into a BigQuery table. 
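+ *
+ * <p>A hypothetical usage sketch ({@code options}, {@code configuration} and the data are
+ * placeholders; the channel is package-scoped, so it is normally obtained through the BigQuery
+ * service rather than constructed directly):
+ * <pre>    {@code
+ *    try (TableDataWriteChannel channel = new TableDataWriteChannel(options, configuration)) {
+ *      channel.write(ByteBuffer.wrap(csvRows.getBytes(StandardCharsets.UTF_8)));
+ *    }
+ * }</pre>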
+ */ +class TableDataWriteChannel extends BaseWriteChannel { + + TableDataWriteChannel(BigQueryOptions options, + WriteChannelConfiguration writeChannelConfiguration) { + this(options, writeChannelConfiguration, options.rpc().open(writeChannelConfiguration.toPb())); + } + + TableDataWriteChannel(BigQueryOptions options, WriteChannelConfiguration config, + String uploadId) { + super(options, config, uploadId); + } + + @Override + protected void flushBuffer(final int length, final boolean last) { + try { + runWithRetries(callable(new Runnable() { + @Override + public void run() { + options().rpc().write(uploadId(), buffer(), 0, position(), length, last); + } + }), options().retryParams(), BigQueryImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + protected StateImpl.Builder stateBuilder() { + return StateImpl.builder(options(), entity(), uploadId()); + } + + static class StateImpl + extends BaseWriteChannel.BaseState { + + private static final long serialVersionUID = -787362105981823738L; + + StateImpl(Builder builder) { + super(builder); + } + + static class Builder + extends BaseWriteChannel.BaseState.Builder { + + private Builder(BigQueryOptions options, WriteChannelConfiguration configuration, + String uploadId) { + super(options, configuration, uploadId); + } + + public RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(BigQueryOptions options, WriteChannelConfiguration config, + String uploadId) { + return new Builder(options, config, uploadId); + } + + @Override + public WriteChannel restore() { + TableDataWriteChannel channel = new TableDataWriteChannel(serviceOptions, entity, uploadId); + channel.restore(this); + return channel; + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java new file mode 100644 index 000000000000..20ed53cc1a5d --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java @@ -0,0 +1,122 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.TableReference; +import com.google.common.base.Function; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Table identity. 
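+ * <p>For example, a table identity can be created as follows (the ids are placeholders):
+ * <pre> {@code
+ * // a fully-qualified identity: project, dataset and table ids
+ * TableId tableId = TableId.of("my_project", "my_dataset", "my_table");
+ * }</pre>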
+ */
+public class TableId implements Serializable {
+
+ static final Function<TableReference, TableId> FROM_PB_FUNCTION =
+ new Function<TableReference, TableId>() {
+ @Override
+ public TableId apply(TableReference pb) {
+ return TableId.fromPb(pb);
+ }
+ };
+ static final Function<TableId, TableReference> TO_PB_FUNCTION =
+ new Function<TableId, TableReference>() {
+ @Override
+ public TableReference apply(TableId tableId) {
+ return tableId.toPb();
+ }
+ };
+ private static final long serialVersionUID = -6186254820908152300L;
+
+ private final String project;
+ private final String dataset;
+ private final String table;
+
+ /**
+ * Returns the project's user-defined id.
+ */
+ public String project() {
+ return project;
+ }
+
+ /**
+ * Returns the dataset's user-defined id.
+ */
+ public String dataset() {
+ return dataset;
+ }
+
+ /**
+ * Returns the table's user-defined id.
+ */
+ public String table() {
+ return table;
+ }
+
+ private TableId(String project, String dataset, String table) {
+ this.project = project;
+ this.dataset = dataset;
+ this.table = table;
+ }
+
+ /**
+ * Creates a table identity given the project's, dataset's and table's user-defined ids.
+ */
+ public static TableId of(String project, String dataset, String table) {
+ return new TableId(checkNotNull(project), checkNotNull(dataset), checkNotNull(table));
+ }
+
+ /**
+ * Creates a table identity given the dataset's and table's user-defined ids.
+ */
+ public static TableId of(String dataset, String table) {
+ return new TableId(null, checkNotNull(dataset), checkNotNull(table));
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ return obj instanceof TableId && Objects.equals(toPb(), ((TableId) obj).toPb());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(project, dataset, table);
+ }
+
+ @Override
+ public String toString() {
+ return toPb().toString();
+ }
+
+ TableId setProjectId(String projectId) {
+ return project() != null ? this : TableId.of(projectId, dataset(), table());
+ }
+
+ TableReference toPb() {
+ return new TableReference().setProjectId(project).setDatasetId(dataset).setTableId(table);
+ }
+
+ static TableId fromPb(TableReference tableRef) {
+ return new TableId(
+ tableRef.getProjectId(),
+ tableRef.getDatasetId(),
+ tableRef.getTableId());
+ }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java
new file mode 100644
index 000000000000..aeb1eadd9771
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java
@@ -0,0 +1,240 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import com.google.api.services.bigquery.model.Streamingbuffer;
+import com.google.api.services.bigquery.model.Table;
+import com.google.common.base.MoreObjects;
+import com.google.common.base.MoreObjects.ToStringHelper;
+
+import java.io.Serializable;
+import java.math.BigInteger;
+import java.util.Objects;
+
+/**
+ * Google BigQuery table information.
A BigQuery table is a standard, two-dimensional table with + * individual records organized in rows, and a data type assigned to each column (also called a + * field). Individual fields within a record may contain nested and repeated children fields. Every + * table is described by a schema that describes field names, types, and other information. + * + * @see Managing Tables + */ +public class TableInfo extends BaseTableInfo { + + private static final long serialVersionUID = -5910575573063546949L; + + private final String location; + private final StreamingBuffer streamingBuffer; + + /** + * Google BigQuery Table's Streaming Buffer information. This class contains information on a + * table's streaming buffer as the estimated size in number of rows/bytes. + */ + public static class StreamingBuffer implements Serializable { + + private static final long serialVersionUID = -6713971364725267597L; + private final long estimatedRows; + private final long estimatedBytes; + private final long oldestEntryTime; + + StreamingBuffer(long estimatedRows, long estimatedBytes, long oldestEntryTime) { + this.estimatedRows = estimatedRows; + this.estimatedBytes = estimatedBytes; + this.oldestEntryTime = oldestEntryTime; + } + + /** + * Returns a lower-bound estimate of the number of rows currently in the streaming buffer. + */ + public long estimatedRows() { + return estimatedRows; + } + + /** + * Returns a lower-bound estimate of the number of bytes currently in the streaming buffer. + */ + public long estimatedBytes() { + return estimatedBytes; + } + + /** + * Returns the timestamp of the oldest entry in the streaming buffer, in milliseconds since + * epoch. + */ + public long oldestEntryTime() { + return oldestEntryTime; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("estimatedRows", estimatedRows) + .add("estimatedBytes", estimatedBytes) + .add("oldestEntryTime", oldestEntryTime) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(estimatedRows, estimatedBytes, oldestEntryTime); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof StreamingBuffer + && Objects.equals(toPb(), ((StreamingBuffer) obj).toPb()); + } + + Streamingbuffer toPb() { + return new Streamingbuffer() + .setEstimatedBytes(BigInteger.valueOf(estimatedBytes)) + .setEstimatedRows(BigInteger.valueOf(estimatedRows)) + .setOldestEntryTime(BigInteger.valueOf(oldestEntryTime)); + } + + static StreamingBuffer fromPb(Streamingbuffer streamingBufferPb) { + return new StreamingBuffer(streamingBufferPb.getEstimatedRows().longValue(), + streamingBufferPb.getEstimatedBytes().longValue(), + streamingBufferPb.getOldestEntryTime().longValue()); + } + } + + public static final class Builder extends BaseTableInfo.Builder { + + private String location; + private StreamingBuffer streamingBuffer; + + private Builder() {} + + private Builder(TableInfo tableInfo) { + super(tableInfo); + this.location = tableInfo.location; + this.streamingBuffer = tableInfo.streamingBuffer; + } + + protected Builder(Table tablePb) { + super(tablePb); + this.location = tablePb.getLocation(); + if (tablePb.getStreamingBuffer() != null) { + this.streamingBuffer = StreamingBuffer.fromPb(tablePb.getStreamingBuffer()); + } + } + + Builder location(String location) { + this.location = location; + return self(); + } + + Builder streamingBuffer(StreamingBuffer streamingBuffer) { + this.streamingBuffer = streamingBuffer; + return self(); + } + + /** + * Creates a {@code TableInfo} 
object.
+ */
+ @Override
+ public TableInfo build() {
+ return new TableInfo(this);
+ }
+ }
+
+ private TableInfo(Builder builder) {
+ super(builder);
+ this.location = builder.location;
+ this.streamingBuffer = builder.streamingBuffer;
+ }
+
+ /**
+ * Returns the geographic location where the table should reside. This value is inherited from the
+ * dataset.
+ *
+ * @see
+ * Dataset Location
+ */
+ public String location() {
+ return location;
+ }
+
+ /**
+ * Returns information on the table's streaming buffer if any exists. Returns {@code null} if no
+ * streaming buffer exists.
+ */
+ public StreamingBuffer streamingBuffer() {
+ return streamingBuffer;
+ }
+
+ /**
+ * Returns a builder for a BigQuery Table.
+ *
+ * @param tableId table id
+ * @param schema the schema of the table
+ */
+ public static Builder builder(TableId tableId, Schema schema) {
+ return new Builder().tableId(tableId).type(Type.TABLE).schema(schema);
+ }
+
+ /**
+ * Creates a BigQuery table given its id and schema.
+ *
+ * @param tableId table id
+ * @param schema the schema of the table
+ */
+ public static TableInfo of(TableId tableId, Schema schema) {
+ return builder(tableId, schema).build();
+ }
+
+ /**
+ * Returns a builder for the {@code TableInfo} object.
+ */
+ @Override
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ @Override
+ ToStringHelper toStringHelper() {
+ return super.toStringHelper()
+ .add("location", location)
+ .add("streamingBuffer", streamingBuffer);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ return obj instanceof TableInfo && baseEquals((TableInfo) obj);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(baseHashCode(), location, streamingBuffer);
+ }
+
+ @Override
+ Table toPb() {
+ Table tablePb = super.toPb();
+ tablePb.setLocation(location);
+ if (streamingBuffer != null) {
+ tablePb.setStreamingBuffer(streamingBuffer.toPb());
+ }
+ return tablePb;
+ }
+
+ @SuppressWarnings("unchecked")
+ static TableInfo fromPb(Table tablePb) {
+ return new Builder(tablePb).build();
+ }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java
new file mode 100644
index 000000000000..2135e0ddc941
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java
@@ -0,0 +1,151 @@
+package com.google.gcloud.bigquery;
+
+import com.google.api.services.bigquery.model.UserDefinedFunctionResource;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Google BigQuery User Defined Function. BigQuery supports user-defined functions (UDFs) written in
+ * JavaScript. A UDF is similar to the "Map" function in a MapReduce: it takes a single row as input
+ * and produces zero or more rows as output. The output can potentially have a different schema than
+ * the input.
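+ * <p>For example, a UDF can be defined inline (a sketch; the JavaScript body is a placeholder):
+ * <pre> {@code
+ * UserDefinedFunction function =
+ *     UserDefinedFunction.inline("function magic(row, emit) { emit({word: row.word}); }");
+ * }</pre>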
+ * + * @see User-Defined Functions + * + */ +public abstract class UserDefinedFunction implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public UserDefinedFunction apply(UserDefinedFunctionResource userDefinedFunctionPb) { + return UserDefinedFunction.fromPb(userDefinedFunctionPb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public UserDefinedFunctionResource apply(UserDefinedFunction userDefinedFunction) { + return userDefinedFunction.toPb(); + } + }; + + private static final long serialVersionUID = 8704260561787440287L; + + /** + * Type of user-defined function. User defined functions can be provided inline as code blobs + * ({@link #INLINE}) or as a Google Cloud Storage URI ({@link #FROM_URI}). + */ + public enum Type { + INLINE, + FROM_URI + } + + private final Type type; + private final String content; + + UserDefinedFunction(Type type, String content) { + this.type = type; + this.content = content; + } + + public Type type() { + return type; + } + + /** + * If {@link #type()} is {@link Type#INLINE} this method returns a code blob. If {@link #type()} + * is {@link Type#FROM_URI} the method returns a Google Cloud Storage URI (e.g. gs://bucket/path). + */ + public String content() { + return content; + } + + /** + * A Google Cloud BigQuery user-defined function, as a code blob. + */ + static final class InlineFunction extends UserDefinedFunction { + + private static final long serialVersionUID = 1083672109192091686L; + + InlineFunction(String inlineCode) { + super(Type.INLINE, inlineCode); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("inlineCode", content()).toString(); + } + + @Override + public com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb() { + return new com.google.api.services.bigquery.model.UserDefinedFunctionResource() + .setInlineCode(content()); + } + } + + /** + * A Google Cloud BigQuery user-defined function, as an URI to Google Cloud Storage. + */ + static final class UriFunction extends UserDefinedFunction { + + private static final long serialVersionUID = 4660331691852223839L; + + UriFunction(String functionUri) { + super(Type.FROM_URI, functionUri); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("functionUri", content()).toString(); + } + + @Override + public com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb() { + return new com.google.api.services.bigquery.model.UserDefinedFunctionResource() + .setResourceUri(content()); + } + } + + @Override + public int hashCode() { + return Objects.hash(type, content); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof UserDefinedFunction + && Objects.equals(toPb(), ((UserDefinedFunction) obj).toPb()); + } + + public abstract com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb(); + + /** + * Creates a Google Cloud BigQuery user-defined function given a code blob. + */ + public static UserDefinedFunction inline(String functionDefinition) { + return new InlineFunction(functionDefinition); + } + + /** + * Creates a Google Cloud BigQuery user-defined function given a Google Cloud Storage URI (e.g. + * gs://bucket/path). 
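+ * <p>For example (the bucket and file names are placeholders):
+ * <pre> {@code
+ * UserDefinedFunction function = UserDefinedFunction.fromUri("gs://my_bucket/my_udf.js");
+ * }</pre>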
+ */ + public static UserDefinedFunction fromUri(String functionDefinition) { + return new UriFunction(functionDefinition); + } + + static UserDefinedFunction fromPb( + com.google.api.services.bigquery.model.UserDefinedFunctionResource pb) { + if (pb.getInlineCode() != null) { + return new InlineFunction(pb.getInlineCode()); + } + if (pb.getResourceUri() != null) { + return new UriFunction(pb.getResourceUri()); + } + throw new IllegalArgumentException("Invalid user-defined function"); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java new file mode 100644 index 000000000000..2698921bc034 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java @@ -0,0 +1,245 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.ViewDefinition; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery View Table information. BigQuery's views are logical views, not materialized + * views, which means that the query that defines the view is re-executed every time the view is + * queried. + * + * @see Views + */ +public class ViewInfo extends BaseTableInfo { + + private static final long serialVersionUID = 7567772157817454901L; + + private final String query; + private final List userDefinedFunctions; + + public static final class Builder extends BaseTableInfo.Builder { + + private String query; + private List userDefinedFunctions; + + private Builder() {} + + private Builder(ViewInfo viewInfo) { + super(viewInfo); + this.query = viewInfo.query; + this.userDefinedFunctions = viewInfo.userDefinedFunctions; + } + + protected Builder(Table tablePb) { + super(tablePb); + ViewDefinition viewPb = tablePb.getView(); + if (viewPb != null) { + this.query = viewPb.getQuery(); + if (viewPb.getUserDefinedFunctionResources() != null) { + this.userDefinedFunctions = Lists.transform(viewPb.getUserDefinedFunctionResources(), + UserDefinedFunction.FROM_PB_FUNCTION); + } + } + } + + /** + * Sets the query used to create the view. + */ + public Builder query(String query) { + this.query = checkNotNull(query); + return self(); + } + + /** + * Sets user defined functions that can be used by {@link #query()}. + * + * @see User-Defined + * Functions + */ + public Builder userDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = ImmutableList.copyOf(checkNotNull(userDefinedFunctions)); + return self(); + } + + /** + * Sets user defined functions that can be used by {@link #query()}. 
+ * + * @see User-Defined + * Functions + */ + public Builder userDefinedFunctions(UserDefinedFunction... userDefinedFunctions) { + this.userDefinedFunctions = ImmutableList.copyOf(userDefinedFunctions); + return self(); + } + + /** + * Creates a {@code ViewInfo} object. + */ + @Override + public ViewInfo build() { + return new ViewInfo(this); + } + } + + private ViewInfo(Builder builder) { + super(builder); + this.query = builder.query; + this.userDefinedFunctions = builder.userDefinedFunctions; + } + + /** + * Returns the query used to create the view. + */ + public String query() { + return query; + } + + /** + * Returns user defined functions that can be used by {@link #query()}. Returns {@code null} if + * not set. + * + * @see User-Defined Functions + * + */ + public List userDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns a builder for the {@code ViewInfo} object. + */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("query", query) + .add("userDefinedFunctions", userDefinedFunctions); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ViewInfo && baseEquals((ViewInfo) obj); + } + + @Override + public int hashCode() { + return Objects.hash(baseHashCode(), query, userDefinedFunctions); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + ViewDefinition viewDefinition = new ViewDefinition().setQuery(query); + if (userDefinedFunctions != null) { + viewDefinition.setUserDefinedFunctionResources(Lists.transform(userDefinedFunctions, + UserDefinedFunction.TO_PB_FUNCTION)); + } + tablePb.setView(viewDefinition); + return tablePb; + } + + /** + * Returns a builder for a BigQuery View Table. + * + * @param tableId table id + * @param query the query used to generate the table + */ + public static Builder builder(TableId tableId, String query) { + return new Builder().tableId(tableId).type(Type.VIEW).query(query); + } + + /** + * Returns a builder for a BigQuery View Table. + * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static Builder builder(TableId table, String query, List functions) { + return new Builder() + .tableId(table) + .type(Type.VIEW) + .userDefinedFunctions(functions) + .query(query); + } + + /** + * Returns a builder for a BigQuery View Table. + * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static Builder builder(TableId table, String query, UserDefinedFunction... functions) { + return new Builder() + .tableId(table) + .type(Type.VIEW) + .userDefinedFunctions(functions) + .query(query); + } + + /** + * Creates a BigQuery View given table identity and query. + * + * @param tableId table id + * @param query the query used to generate the table + */ + public static ViewInfo of(TableId tableId, String query) { + return builder(tableId, query).build(); + } + + /** + * Creates a BigQuery View given table identity, a query and some user-defined functions. 
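+ * <p>A sketch of a call (the ids, the query and the function URI are placeholders):
+ * <pre> {@code
+ * List<UserDefinedFunction> functions =
+ *     ImmutableList.of(UserDefinedFunction.fromUri("gs://my_bucket/my_udf.js"));
+ * ViewInfo viewInfo = ViewInfo.of(
+ *     TableId.of("my_dataset", "my_view"),
+ *     "SELECT magic(word) FROM my_dataset.my_table",
+ *     functions);
+ * }</pre>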
+ * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static ViewInfo of(TableId table, String query, List functions) { + return builder(table, query, functions).build(); + } + + /** + * Creates a BigQuery View given table identity, a query and some user-defined functions. + * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static ViewInfo of(TableId table, String query, UserDefinedFunction... functions) { + return builder(table, query, functions).build(); + } + + @SuppressWarnings("unchecked") + static ViewInfo fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java new file mode 100644 index 000000000000..18342bac1bff --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/WriteChannelConfiguration.java @@ -0,0 +1,320 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Configuration for a load operation. A load configuration can be used to load data + * into a table with a {@link com.google.gcloud.WriteChannel} + * ({@link BigQuery#writer(WriteChannelConfiguration)}). 
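+ * <p>For example, a configuration for loading newline-delimited JSON data could be built as
+ * follows (a sketch; the table ids are placeholders):
+ * <pre> {@code
+ * WriteChannelConfiguration configuration = WriteChannelConfiguration
+ *     .builder(TableId.of("dataset", "table"))
+ *     .formatOptions(FormatOptions.json())
+ *     .build();
+ * }</pre>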
+ */ +public class WriteChannelConfiguration implements LoadConfiguration, Serializable { + + private static final long serialVersionUID = 470267591917413578L; + + private final TableId destinationTable; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Schema schema; + private final Boolean ignoreUnknownValues; + private final List projectionFields; + + public static final class Builder implements LoadConfiguration.Builder { + + private TableId destinationTable; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Schema schema; + private Boolean ignoreUnknownValues; + private List projectionFields; + + private Builder() {} + + private Builder(WriteChannelConfiguration writeChannelConfiguration) { + this.destinationTable = writeChannelConfiguration.destinationTable; + this.createDisposition = writeChannelConfiguration.createDisposition; + this.writeDisposition = writeChannelConfiguration.writeDisposition; + this.formatOptions = writeChannelConfiguration.formatOptions; + this.maxBadRecords = writeChannelConfiguration.maxBadRecords; + this.schema = writeChannelConfiguration.schema; + this.ignoreUnknownValues = writeChannelConfiguration.ignoreUnknownValues; + this.projectionFields = writeChannelConfiguration.projectionFields; + } + + private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + JobConfigurationLoad loadConfigurationPb = configurationPb.getLoad(); + this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); + if (loadConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); + } + if (loadConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition()); + } + if (loadConfigurationPb.getSourceFormat() != null) { + this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat()); + } + if (loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getAllowQuotedNewlines() != null + || loadConfigurationPb.getEncoding() != null + || loadConfigurationPb.getFieldDelimiter() != null + || loadConfigurationPb.getQuote() != null + || loadConfigurationPb.getSkipLeadingRows() != null) { + CsvOptions.Builder builder = CsvOptions.builder() + .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows()) + .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines()) + .encoding(loadConfigurationPb.getEncoding()) + .fieldDelimiter(loadConfigurationPb.getFieldDelimiter()) + .quote(loadConfigurationPb.getQuote()) + .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows()); + this.formatOptions = builder.build(); + } + this.maxBadRecords = loadConfigurationPb.getMaxBadRecords(); + if (loadConfigurationPb.getSchema() != null) { + this.schema = Schema.fromPb(loadConfigurationPb.getSchema()); + } + this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues(); + this.projectionFields = loadConfigurationPb.getProjectionFields(); + } + + @Override + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + @Override + public Builder createDisposition(CreateDisposition createDisposition) { + 
this.createDisposition = createDisposition; + return this; + } + + @Override + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + @Override + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = formatOptions; + return this; + } + + @Override + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + @Override + public Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + @Override + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + @Override + public Builder projectionFields(List projectionFields) { + this.projectionFields = + projectionFields != null ? ImmutableList.copyOf(projectionFields) : null; + return this; + } + + public WriteChannelConfiguration build() { + return new WriteChannelConfiguration(this); + } + } + + protected WriteChannelConfiguration(Builder builder) { + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + this.formatOptions = builder.formatOptions; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.projectionFields = builder.projectionFields; + } + + @Override + public TableId destinationTable() { + return destinationTable; + } + + @Override + public CreateDisposition createDisposition() { + return this.createDisposition; + } + + @Override + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + @Override + public CsvOptions csvOptions() { + return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null; + } + + @Override + public Integer maxBadRecords() { + return maxBadRecords; + } + + @Override + public Schema schema() { + return schema; + } + + @Override + public String format() { + return formatOptions != null ? 
formatOptions.type() : null; + } + + @Override + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + @Override + public List projectionFields() { + return projectionFields; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + MoreObjects.ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this) + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("formatOptions", formatOptions) + .add("maxBadRecords", maxBadRecords) + .add("schema", schema) + .add("ignoreUnknownValue", ignoreUnknownValues) + .add("projectionFields", projectionFields); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof WriteChannelConfiguration + && Objects.equals(toPb(), ((WriteChannelConfiguration) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(destinationTable, createDisposition, writeDisposition, formatOptions, + maxBadRecords, schema, ignoreUnknownValues, projectionFields); + } + + WriteChannelConfiguration setProjectId(String projectId) { + return toBuilder().destinationTable(destinationTable().setProjectId(projectId)).build(); + } + + com.google.api.services.bigquery.model.JobConfiguration toPb() { + JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad(); + loadConfigurationPb.setDestinationTable(destinationTable.toPb()); + if (createDisposition != null) { + loadConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + loadConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + if (csvOptions() != null) { + CsvOptions csvOptions = csvOptions(); + loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter()) + .setAllowJaggedRows(csvOptions.allowJaggedRows()) + .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) + .setEncoding(csvOptions.encoding()) + .setQuote(csvOptions.quote()) + .setSkipLeadingRows(csvOptions.skipLeadingRows()); + } + if (schema != null) { + loadConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + loadConfigurationPb.setSourceFormat(formatOptions.type()); + } + loadConfigurationPb.setMaxBadRecords(maxBadRecords); + loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + loadConfigurationPb.setProjectionFields(projectionFields); + return new com.google.api.services.bigquery.model.JobConfiguration() + .setLoad(loadConfigurationPb); + } + + static WriteChannelConfiguration fromPb( + com.google.api.services.bigquery.model.JobConfiguration configurationPb) { + return new Builder(configurationPb).build(); + } + + /** + * Creates a builder for a BigQuery Load Configuration given the destination table. + */ + public static Builder builder(TableId destinationTable) { + return new Builder().destinationTable(destinationTable); + } + + /** + * Creates a builder for a BigQuery Load Configuration given the destination table and format. + */ + public static Builder builder(TableId destinationTable, FormatOptions format) { + return builder(destinationTable).formatOptions(format); + } + + /** + * Returns a BigQuery Load Configuration for the given destination table. 
+ */ + public static WriteChannelConfiguration of(TableId destinationTable) { + return builder(destinationTable).build(); + } + + /** + * Returns a BigQuery Load Configuration for the given destination table and format. + */ + public static WriteChannelConfiguration of(TableId destinationTable, FormatOptions format) { + return builder(destinationTable).formatOptions(format).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java new file mode 100644 index 000000000000..dd57da2b606a --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java @@ -0,0 +1,47 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A client to Google Cloud BigQuery. + * + *

<p>A simple usage example:
+ * <pre> {@code
+ * BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+ * TableId tableId = TableId.of("dataset", "table");
+ * BaseTableInfo info = bigquery.getTable(tableId);
+ * if (info == null) {
+ *   System.out.println("Creating table " + tableId);
+ *   Field integerField = Field.of("fieldName", Field.Type.integer());
+ *   bigquery.create(TableInfo.of(tableId, Schema.of(integerField)));
+ * } else {
+ *   System.out.println("Loading data into table " + tableId);
+ *   LoadJobConfiguration configuration = LoadJobConfiguration.of(tableId, "gs://bucket/path");
+ *   JobInfo loadJob = JobInfo.of(configuration);
+ *   loadJob = bigquery.create(loadJob);
+ *   while (loadJob.status().state() != JobStatus.State.DONE) {
+ *     Thread.sleep(1000L);
+ *     loadJob = bigquery.getJob(loadJob.jobId());
+ *   }
+ *   if (loadJob.status().error() != null) {
+ *     System.out.println("Job completed with errors");
+ *   } else {
+ *     System.out.println("Job succeeded");
+ *   }
+ * }}</pre>
+ * + * @see Google Cloud BigQuery + */ +package com.google.gcloud.bigquery; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java new file mode 100644 index 000000000000..491e822d683c --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java @@ -0,0 +1,140 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery.testing; + +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.RetryParams; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryException; +import com.google.gcloud.bigquery.BigQueryOptions; + +import java.io.IOException; +import java.io.InputStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Utility to create a remote BigQuery configuration for testing. BigQuery options can be obtained + * via the {@link #options()} method. Returned options have custom + * {@link BigQueryOptions#retryParams()}: {@link RetryParams#retryMaxAttempts()} is {@code 10}, + * {@link RetryParams#retryMinAttempts()} is {@code 6}, {@link RetryParams#maxRetryDelayMillis()} is + * {@code 30000}, {@link RetryParams#totalRetryPeriodMillis()} is {@code 120000} and + * {@link RetryParams#initialRetryDelayMillis()} is {@code 250}. + * {@link BigQueryOptions#connectTimeout()} and {@link BigQueryOptions#readTimeout()} are both set + * to {@code 60000}. + */ +public class RemoteBigQueryHelper { + + private static final Logger log = Logger.getLogger(RemoteBigQueryHelper.class.getName()); + private static final String DATASET_NAME_PREFIX = "gcloud_test_dataset_temp_"; + private final BigQueryOptions options; + + private RemoteBigQueryHelper(BigQueryOptions options) { + this.options = options; + } + + /** + * Returns a {@link BigQueryOptions} object to be used for testing. + */ + public BigQueryOptions options() { + return options; + } + + /** + * Deletes a dataset, even if non-empty. + * + * @param bigquery the BigQuery service to be used to issue the delete request + * @param dataset the dataset to be deleted + * @return {@code true} if deletion succeeded, {@code false} if the dataset was not found + * @throws BigQueryException upon failure + */ + public static boolean forceDelete(BigQuery bigquery, String dataset) { + return bigquery.delete(dataset, BigQuery.DatasetDeleteOption.deleteContents()); + } + + /** + * Returns a dataset name generated using a random UUID. + */ + public static String generateDatasetName() { + return DATASET_NAME_PREFIX + UUID.randomUUID().toString().replace('-', '_'); + } + + /** + * Creates a {@code RemoteBigQueryHelper} object for the given project id and JSON key input + * stream. 
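+ * <p>A sketch of a call (the project id and the key path are placeholders):
+ * <pre> {@code
+ * RemoteBigQueryHelper helper = RemoteBigQueryHelper.create(
+ *     "my-project", new FileInputStream("/path/to/my/JSON/key.json"));
+ * BigQuery bigquery = helper.options().service();
+ * }</pre>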
+ * + * @param projectId id of the project to be used for running the tests + * @param keyStream input stream for a JSON key + * @return A {@code RemoteBigQueryHelper} object for the provided options + * @throws BigQueryHelperException if {@code keyStream} is not a valid JSON key stream + */ + public static RemoteBigQueryHelper create(String projectId, InputStream keyStream) + throws BigQueryHelperException { + try { + BigQueryOptions bigqueryOptions = BigQueryOptions.builder() + .authCredentials(AuthCredentials.createForJson(keyStream)) + .projectId(projectId) + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteBigQueryHelper(bigqueryOptions); + } catch (IOException ex) { + if (log.isLoggable(Level.WARNING)) { + log.log(Level.WARNING, ex.getMessage()); + } + throw BigQueryHelperException.translate(ex); + } + } + + /** + * Creates a {@code RemoteBigQueryHelper} object using default project id and authentication + * credentials. + */ + public static RemoteBigQueryHelper create() { + BigQueryOptions bigqueryOptions = BigQueryOptions.builder() + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteBigQueryHelper(bigqueryOptions); + } + + private static RetryParams retryParams() { + return RetryParams.builder() + .retryMaxAttempts(10) + .retryMinAttempts(6) + .maxRetryDelayMillis(30000) + .totalRetryPeriodMillis(120000) + .initialRetryDelayMillis(250) + .build(); + } + + public static class BigQueryHelperException extends RuntimeException { + + private static final long serialVersionUID = 3984993496060055562L; + + public BigQueryHelperException(String message, Throwable cause) { + super(message, cause); + } + + public static BigQueryHelperException translate(Exception ex) { + return new BigQueryHelperException(ex.getMessage(), ex); + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java new file mode 100644 index 000000000000..9ca792ecd77d --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java @@ -0,0 +1,38 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A testing helper for Google BigQuery. + * + *

<p>A simple usage example:
+ *
+ * <p>Before the test:
+ *
+ * <pre> {@code
+ * RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
+ * BigQuery bigquery = bigqueryHelper.options().service();
+ * String dataset = RemoteBigQueryHelper.generateDatasetName();
+ * bigquery.create(DatasetInfo.builder(dataset).build());
+ * } </pre>
+ *
+ * <p>After the test:
+ *
+ * <pre> {@code
+ * RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
+ * }</pre>
+ * + * @see + * gcloud-java tools for testing + */ +package com.google.gcloud.bigquery.testing; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java new file mode 100644 index 000000000000..6062e19950e0 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java @@ -0,0 +1,211 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.spi; + +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.QueryResponse; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableRow; +import com.google.gcloud.bigquery.BigQueryException; + +import java.util.Map; + +public interface BigQueryRpc { + + // These options are part of the Google Cloud BigQuery query parameters + enum Option { + FIELDS("fields"), + DELETE_CONTENTS("deleteContents"), + ALL_DATASETS("all"), + ALL_USERS("allUsers"), + MAX_RESULTS("maxResults"), + PAGE_TOKEN("pageToken"), + START_INDEX("startIndex"), + STATE_FILTER("stateFilter"), + TIMEOUT("timeoutMs"); + + private final String value; + + Option(String value) { + this.value = value; + } + + public String value() { + return value; + } + + @SuppressWarnings("unchecked") + T get(Map options) { + return (T) options.get(this); + } + + String getString(Map options) { + return get(options); + } + + Long getLong(Map options) { + return get(options); + } + + Boolean getBoolean(Map options) { + return get(options); + } + } + + class Tuple { + + private final X x; + private final Y y; + + private Tuple(X x, Y y) { + this.x = x; + this.y = y; + } + + public static Tuple of(X x, Y y) { + return new Tuple<>(x, y); + } + + public X x() { + return x; + } + + public Y y() { + return y; + } + } + + /** + * Returns the requested dataset or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Dataset getDataset(String datasetId, Map options) throws BigQueryException; + + /** + * Lists the project's datasets. Partial information is returned on a dataset (datasetReference, + * friendlyName and id). To get full information use {@link #getDataset(String, Map)}. 
+ * + * @throws BigQueryException upon failure + */ + Tuple> listDatasets(Map options) throws BigQueryException; + + Dataset create(Dataset dataset, Map options) throws BigQueryException; + + Table create(Table table, Map options) throws BigQueryException; + + Job create(Job job, Map options) throws BigQueryException; + + /** + * Delete the requested dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean deleteDataset(String datasetId, Map options) throws BigQueryException; + + Dataset patch(Dataset dataset, Map options) throws BigQueryException; + + Table patch(Table table, Map options) throws BigQueryException; + + /** + * Returns the requested table or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Table getTable(String datasetId, String tableId, Map options) throws BigQueryException; + + /** + * Lists the dataset's tables. Partial information is returned on a table (tableReference, + * friendlyName, id and type). To get full information use {@link #getTable(String, String, Map)}. + * + * @throws BigQueryException upon failure + */ + Tuple> listTables(String dataset, Map options) + throws BigQueryException; + + /** + * Delete the requested table. + * + * @return {@code true} if table was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean deleteTable(String datasetId, String tableId) throws BigQueryException; + + TableDataInsertAllResponse insertAll(String datasetId, String tableId, + TableDataInsertAllRequest request) throws BigQueryException; + + Tuple> listTableData(String datasetId, String tableId, + Map options) throws BigQueryException; + + /** + * Returns the requested job or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Job getJob(String jobId, Map options) throws BigQueryException; + + /** + * Lists the project's jobs. + * + * @throws BigQueryException upon failure + */ + Tuple> listJobs(Map options) throws BigQueryException; + + /** + * Sends a job cancel request. This call will return immediately, and the client will need to poll + * for the job status to see if the cancel completed successfully. + * + * @return {@code true} if cancel was requested successfully, {@code false} if the job was not + * found + * @throws BigQueryException upon failure + */ + boolean cancel(String jobId) throws BigQueryException; + + GetQueryResultsResponse getQueryResults(String jobId, Map options) + throws BigQueryException; + + QueryResponse query(QueryRequest request) throws BigQueryException; + + /** + * Opens a resumable upload session to load data into a BigQuery table and returns an upload URI. + * + * @param configuration load configuration + * @throws BigQueryException upon failure + */ + String open(JobConfiguration configuration) throws BigQueryException; + + /** + * Uploads the provided data to the resumable upload session at the specified position. 
+ *
+ * @param uploadId the resumable upload session URI
+ * @param toWrite a byte array of data to upload
+ * @param toWriteOffset offset in the {@code toWrite} param to start writing from
+ * @param destOffset offset in the destination where to upload data to
+ * @param length the number of bytes to upload
+ * @param last {@code true} indicates that the last chunk is being uploaded
+ * @throws BigQueryException upon failure
+ */
+ void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length,
+ boolean last) throws BigQueryException;
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java
new file mode 100644
index 000000000000..2706868756a5
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.spi;
+
+import com.google.gcloud.bigquery.BigQueryOptions;
+
+/**
+ * An interface for BigQuery RPC factory.
+ * Implementations are loaded via {@link java.util.ServiceLoader}.
+ */
+public interface BigQueryRpcFactory extends ServiceRpcFactory<BigQueryRpc, BigQueryOptions> {
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java
new file mode 100644
index 000000000000..b57f1dc8a128
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java
@@ -0,0 +1,469 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */ + +package com.google.gcloud.spi; + +import static com.google.gcloud.spi.BigQueryRpc.Option.DELETE_CONTENTS; +import static com.google.gcloud.spi.BigQueryRpc.Option.FIELDS; +import static com.google.gcloud.spi.BigQueryRpc.Option.MAX_RESULTS; +import static com.google.gcloud.spi.BigQueryRpc.Option.PAGE_TOKEN; +import static com.google.gcloud.spi.BigQueryRpc.Option.START_INDEX; +import static com.google.gcloud.spi.BigQueryRpc.Option.TIMEOUT; +import static java.net.HttpURLConnection.HTTP_CREATED; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import static java.net.HttpURLConnection.HTTP_OK; + +import com.google.api.client.http.ByteArrayContent; +import com.google.api.client.http.GenericUrl; +import com.google.api.client.http.HttpRequest; +import com.google.api.client.http.HttpRequestFactory; +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpResponse; +import com.google.api.client.http.HttpResponseException; +import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.json.JsonHttpContent; +import com.google.api.client.json.JsonFactory; +import com.google.api.client.json.jackson.JacksonFactory; +import com.google.api.services.bigquery.Bigquery; +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.DatasetList; +import com.google.api.services.bigquery.model.DatasetReference; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.JobList; +import com.google.api.services.bigquery.model.JobStatus; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.QueryResponse; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TableList; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; + +import com.google.gcloud.bigquery.BigQueryException; +import com.google.gcloud.bigquery.BigQueryOptions; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.List; +import java.util.Map; + +public class DefaultBigQueryRpc implements BigQueryRpc { + + public static final String DEFAULT_PROJECTION = "full"; + private static final String BASE_RESUMABLE_URI = + "https://www.googleapis.com/upload/bigquery/v2/projects/"; + // see: https://cloud.google.com/bigquery/loading-data-post-request#resume-upload + private static final int HTTP_RESUME_INCOMPLETE = 308; + private final BigQueryOptions options; + private final Bigquery bigquery; + + public DefaultBigQueryRpc(BigQueryOptions options) { + HttpTransport transport = options.httpTransportFactory().create(); + HttpRequestInitializer initializer = options.httpRequestInitializer(); + this.options = options; + bigquery = new Bigquery.Builder(transport, new JacksonFactory(), initializer) + .setRootUrl(options.host()) + .setApplicationName(options.applicationName()) + .build(); + } + + private static BigQueryException 
translate(IOException exception) {
+    return new BigQueryException(exception);
+  }
+
+  @Override
+  public Dataset getDataset(String datasetId, Map<Option, ?> options) throws BigQueryException {
+    try {
+      return bigquery.datasets()
+          .get(this.options.projectId(), datasetId)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      BigQueryException serviceException = translate(ex);
+      if (serviceException.code() == HTTP_NOT_FOUND) {
+        return null;
+      }
+      throw serviceException;
+    }
+  }
+
+  @Override
+  public Tuple<String, Iterable<Dataset>> listDatasets(Map<Option, ?> options)
+      throws BigQueryException {
+    try {
+      DatasetList datasetsList = bigquery.datasets()
+          .list(this.options.projectId())
+          .setAll(Option.ALL_DATASETS.getBoolean(options))
+          .setMaxResults(MAX_RESULTS.getLong(options))
+          .setPageToken(PAGE_TOKEN.getString(options))
+          .execute();
+      Iterable<DatasetList.Datasets> datasets = datasetsList.getDatasets();
+      return Tuple.of(datasetsList.getNextPageToken(),
+          Iterables.transform(datasets != null ? datasets
+              : ImmutableList.<DatasetList.Datasets>of(),
+              new Function<DatasetList.Datasets, Dataset>() {
+                @Override
+                public Dataset apply(DatasetList.Datasets datasetPb) {
+                  return new Dataset()
+                      .setDatasetReference(datasetPb.getDatasetReference())
+                      .setFriendlyName(datasetPb.getFriendlyName())
+                      .setId(datasetPb.getId())
+                      .setKind(datasetPb.getKind());
+                }
+              }));
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Dataset create(Dataset dataset, Map<Option, ?> options) throws BigQueryException {
+    try {
+      return bigquery.datasets().insert(this.options.projectId(), dataset)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Table create(Table table, Map<Option, ?> options)
+      throws BigQueryException {
+    try {
+      // unset the type, as it is output only
+      table.setType(null);
+      return bigquery.tables()
+          .insert(this.options.projectId(), table.getTableReference().getDatasetId(), table)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Job create(Job job, Map<Option, ?> options) throws BigQueryException {
+    try {
+      return bigquery.jobs()
+          .insert(this.options.projectId(), job)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public boolean deleteDataset(String datasetId, Map<Option, ?> options) throws BigQueryException {
+    try {
+      bigquery.datasets().delete(this.options.projectId(), datasetId)
+          .setDeleteContents(DELETE_CONTENTS.getBoolean(options))
+          .execute();
+      return true;
+    } catch (IOException ex) {
+      BigQueryException serviceException = translate(ex);
+      if (serviceException.code() == HTTP_NOT_FOUND) {
+        return false;
+      }
+      throw serviceException;
+    }
+  }
+
+  @Override
+  public Dataset patch(Dataset dataset, Map<Option, ?> options) throws BigQueryException {
+    try {
+      DatasetReference reference = dataset.getDatasetReference();
+      return bigquery.datasets()
+          .patch(this.options.projectId(), reference.getDatasetId(), dataset)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Table patch(Table table, Map<Option, ?> options) throws BigQueryException {
+    try {
+      // unset the type, as it is output only
+      table.setType(null);
+      TableReference reference = table.getTableReference();
+      return bigquery.tables()
+          .patch(this.options.projectId(), reference.getDatasetId(), reference.getTableId(), table)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Table getTable(String datasetId, String tableId, Map<Option, ?> options)
+      throws BigQueryException {
+    try {
+      return bigquery.tables()
+          .get(this.options.projectId(), datasetId, tableId)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      BigQueryException serviceException = translate(ex);
+      if (serviceException.code() == HTTP_NOT_FOUND) {
+        return null;
+      }
+      throw serviceException;
+    }
+  }
+
+  @Override
+  public Tuple<String, Iterable<Table>> listTables(String datasetId, Map<Option, ?> options)
+      throws BigQueryException {
+    try {
+      TableList tableList = bigquery.tables()
+          .list(this.options.projectId(), datasetId)
+          .setMaxResults(MAX_RESULTS.getLong(options))
+          .setPageToken(PAGE_TOKEN.getString(options))
+          .execute();
+      Iterable<TableList.Tables> tables = tableList.getTables();
+      return Tuple.of(tableList.getNextPageToken(),
+          Iterables.transform(tables != null ? tables : ImmutableList.<TableList.Tables>of(),
+              new Function<TableList.Tables, Table>() {
+                @Override
+                public Table apply(TableList.Tables tablePb) {
+                  return new Table()
+                      .setFriendlyName(tablePb.getFriendlyName())
+                      .setId(tablePb.getId())
+                      .setKind(tablePb.getKind())
+                      .setTableReference(tablePb.getTableReference())
+                      .setType(tablePb.getType());
+                }
+              }));
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public boolean deleteTable(String datasetId, String tableId) throws BigQueryException {
+    try {
+      bigquery.tables().delete(this.options.projectId(), datasetId, tableId).execute();
+      return true;
+    } catch (IOException ex) {
+      BigQueryException serviceException = translate(ex);
+      if (serviceException.code() == HTTP_NOT_FOUND) {
+        return false;
+      }
+      throw serviceException;
+    }
+  }
+
+  @Override
+  public TableDataInsertAllResponse insertAll(String datasetId, String tableId,
+      TableDataInsertAllRequest request) throws BigQueryException {
+    try {
+      return bigquery.tabledata()
+          .insertAll(this.options.projectId(), datasetId, tableId, request)
+          .execute();
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Tuple<String, Iterable<TableRow>> listTableData(String datasetId, String tableId,
+      Map<Option, ?> options) throws BigQueryException {
+    try {
+      TableDataList tableDataList = bigquery.tabledata()
+          .list(this.options.projectId(), datasetId, tableId)
+          .setMaxResults(MAX_RESULTS.getLong(options))
+          .setPageToken(PAGE_TOKEN.getString(options))
+          .setStartIndex(START_INDEX.getLong(options) != null
+              ? BigInteger.valueOf(START_INDEX.getLong(options)) : null)
+          .execute();
+      return Tuple.<String, Iterable<TableRow>>of(tableDataList.getPageToken(),
+          tableDataList.getRows());
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public Job getJob(String jobId, Map<Option, ?> options) throws BigQueryException {
+    try {
+      return bigquery.jobs()
+          .get(this.options.projectId(), jobId)
+          .setFields(FIELDS.getString(options))
+          .execute();
+    } catch (IOException ex) {
+      BigQueryException serviceException = translate(ex);
+      if (serviceException.code() == HTTP_NOT_FOUND) {
+        return null;
+      }
+      throw serviceException;
+    }
+  }
+
+  @Override
+  public Tuple<String, Iterable<Job>> listJobs(Map<Option, ?> options) throws BigQueryException {
+    try {
+      JobList jobsList = bigquery.jobs()
+          .list(this.options.projectId())
+          .setAllUsers(Option.ALL_USERS.getBoolean(options))
+          .setFields(Option.FIELDS.getString(options))
+          .setStateFilter(Option.STATE_FILTER.<List<String>>get(options))
+          .setMaxResults(MAX_RESULTS.getLong(options))
+          .setPageToken(PAGE_TOKEN.getString(options))
+          .setProjection(DEFAULT_PROJECTION)
+          .execute();
+      Iterable<JobList.Jobs> jobs = jobsList.getJobs();
+      return Tuple.of(jobsList.getNextPageToken(),
+          Iterables.transform(jobs != null ? jobs : ImmutableList.<JobList.Jobs>of(),
+              new Function<JobList.Jobs, Job>() {
+                @Override
+                public Job apply(JobList.Jobs jobPb) {
+                  JobStatus statusPb = jobPb.getStatus() != null
+                      ? jobPb.getStatus() : new JobStatus();
+                  if (statusPb.getState() == null) {
+                    statusPb.setState(jobPb.getState());
+                  }
+                  if (statusPb.getErrorResult() == null) {
+                    statusPb.setErrorResult(jobPb.getErrorResult());
+                  }
+                  return new Job()
+                      .setConfiguration(jobPb.getConfiguration())
+                      .setId(jobPb.getId())
+                      .setJobReference(jobPb.getJobReference())
+                      .setKind(jobPb.getKind())
+                      .setStatistics(jobPb.getStatistics())
+                      .setStatus(statusPb)
+                      .setUserEmail(jobPb.getUserEmail());
+                }
+              }));
+    } catch (IOException ex) {
+      throw translate(ex);
+    }
+  }
+
+  @Override
+  public boolean cancel(String jobId) throws BigQueryException {
+    try {
+      bigquery.jobs().cancel(this.options.projectId(), jobId).execute();
+      return true;
+    } catch (IOException ex) {
+      BigQueryException serviceException = translate(ex);
+      if (serviceException.code() == HTTP_NOT_FOUND) {
+        return false;
+      }
+      throw serviceException;
+    }
+  }
+
+  @Override
+  public GetQueryResultsResponse getQueryResults(String jobId, Map<Option, ?> options)
+      throws BigQueryException {
+    try {
+      return bigquery.jobs().getQueryResults(this.options.projectId(), jobId)
+          .setMaxResults(MAX_RESULTS.getLong(options))
+          .setPageToken(PAGE_TOKEN.getString(options))
+          .setStartIndex(START_INDEX.getLong(options) != null
+              ?
BigInteger.valueOf(START_INDEX.getLong(options)) : null) + .setTimeoutMs(TIMEOUT.getLong(options)) + .execute(); + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; + } + } + + @Override + public QueryResponse query(QueryRequest request) throws BigQueryException { + try { + return bigquery.jobs().query(this.options.projectId(), request).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public String open(JobConfiguration configuration) throws BigQueryException { + try { + Job loadJob = new Job().setConfiguration(configuration); + StringBuilder builder = new StringBuilder() + .append(BASE_RESUMABLE_URI) + .append(options.projectId()) + .append("/jobs"); + GenericUrl url = new GenericUrl(builder.toString()); + url.set("uploadType", "resumable"); + JsonFactory jsonFactory = bigquery.getJsonFactory(); + HttpRequestFactory requestFactory = bigquery.getRequestFactory(); + HttpRequest httpRequest = + requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); + httpRequest.getHeaders().set("X-Upload-Content-Value", "application/octet-stream"); + HttpResponse response = httpRequest.execute(); + return response.getHeaders().getLocation(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean last) throws BigQueryException { + try { + GenericUrl url = new GenericUrl(uploadId); + HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url, + new ByteArrayContent(null, toWrite, toWriteOffset, length)); + long limit = destOffset + length; + StringBuilder range = new StringBuilder("bytes "); + range.append(destOffset).append('-').append(limit - 1).append('/'); + if (last) { + range.append(limit); + } else { + range.append('*'); + } + httpRequest.getHeaders().setContentRange(range.toString()); + int code; + String message; + IOException exception = null; + try { + HttpResponse response = httpRequest.execute(); + code = response.getStatusCode(); + message = response.getStatusMessage(); + } catch (HttpResponseException ex) { + exception = ex; + code = ex.getStatusCode(); + message = ex.getStatusMessage(); + } + if (!last && code != HTTP_RESUME_INCOMPLETE + || last && !(code == HTTP_OK || code == HTTP_CREATED)) { + if (exception != null) { + throw exception; + } + throw new BigQueryException(code, message); + } + } catch (IOException ex) { + throw translate(ex); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java new file mode 100644 index 000000000000..438526b95b6e --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java @@ -0,0 +1,95 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.api.services.bigquery.model.Dataset; +import com.google.gcloud.bigquery.Acl.Domain; +import com.google.gcloud.bigquery.Acl.Entity; +import com.google.gcloud.bigquery.Acl.Entity.Type; +import com.google.gcloud.bigquery.Acl.Group; +import com.google.gcloud.bigquery.Acl.Role; +import com.google.gcloud.bigquery.Acl.User; +import com.google.gcloud.bigquery.Acl.View; + +import org.junit.Test; + +public class AclTest { + + @Test + public void testDomainEntity() { + Domain entity = new Domain("d1"); + assertEquals("d1", entity.domain()); + assertEquals(Type.DOMAIN, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testGroupEntity() { + Group entity = new Group("g1"); + assertEquals("g1", entity.identifier()); + assertEquals(Type.GROUP, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testSpecialGroupEntity() { + Group entity = Group.ofAllAuthenticatedUsers(); + assertEquals("allAuthenticatedUsers", entity.identifier()); + entity = Group.ofProjectWriters(); + assertEquals("projectWriters", entity.identifier()); + entity = Group.ofProjectReaders(); + assertEquals("projectReaders", entity.identifier()); + entity = Group.ofProjectOwners(); + assertEquals("projectOwners", entity.identifier()); + } + + @Test + public void testUserEntity() { + User entity = new User("u1"); + assertEquals("u1", entity.email()); + assertEquals(Type.USER, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testViewEntity() { + TableId viewId = TableId.of("project", "dataset", "view"); + View entity = new View(viewId); + assertEquals(viewId, entity.id()); + assertEquals(Type.VIEW, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testOf() { + Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER); + assertEquals(Group.ofAllAuthenticatedUsers(), acl.entity()); + assertEquals(Role.READER, acl.role()); + Dataset.Access pb = acl.toPb(); + assertEquals(acl, Acl.fromPb(pb)); + View view = new View(TableId.of("project", "dataset", "view")); + acl = Acl.of(view); + assertEquals(view, acl.entity()); + assertEquals(null, acl.role()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java new file mode 100644 index 000000000000..c8de039e233f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java @@ -0,0 +1,45 @@ +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class BigQueryErrorTest { + + private static final String REASON = "reason"; + private static final String LOCATION = "location"; + private static final String DEBUG_INFO = "debugInfo"; + private static final String MESSAGE = "message"; + private static final BigQueryError ERROR = + new BigQueryError(REASON, LOCATION, MESSAGE, DEBUG_INFO); + private static final BigQueryError ERROR_INCOMPLETE = + new BigQueryError(REASON, LOCATION, MESSAGE); + + @Test + public void testConstructor() { + assertEquals(REASON, 
ERROR.reason()); + assertEquals(LOCATION, ERROR.location()); + assertEquals(DEBUG_INFO, ERROR.debugInfo()); + assertEquals(MESSAGE, ERROR.message()); + assertEquals(REASON, ERROR_INCOMPLETE.reason()); + assertEquals(LOCATION, ERROR_INCOMPLETE.location()); + assertEquals(null, ERROR_INCOMPLETE.debugInfo()); + assertEquals(MESSAGE, ERROR_INCOMPLETE.message()); + } + + @Test + public void testToAndFromPb() { + compareBigQueryError(ERROR, BigQueryError.fromPb(ERROR.toPb())); + compareBigQueryError(ERROR_INCOMPLETE, BigQueryError.fromPb(ERROR_INCOMPLETE.toPb())); + } + + private void compareBigQueryError(BigQueryError expected, BigQueryError value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.reason(), value.reason()); + assertEquals(expected.location(), value.location()); + assertEquals(expected.debugInfo(), value.debugInfo()); + assertEquals(expected.message(), value.message()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java new file mode 100644 index 000000000000..66e5289424e2 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryExceptionTest.java @@ -0,0 +1,115 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class BigQueryExceptionTest { + + @Test + public void testBigqueryException() { + BigQueryException exception = new BigQueryException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(502, "message"); + assertEquals(502, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(504, "message"); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new BigQueryException(400, "message"); + assertEquals(400, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertNull(exception.error()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + BigQueryError error = new BigQueryError("reason", null, null); + exception = new BigQueryException(504, "message", error); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertEquals("reason", exception.reason()); + assertEquals(error, exception.error()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new BigQueryException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + BigQueryException cause = new BigQueryException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + BigQueryException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
new file mode 100644
index 000000000000..8af8c700cd8c
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
@@ -0,0 +1,1058 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.eq;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+import com.google.api.services.bigquery.model.Dataset;
+import com.google.api.services.bigquery.model.ErrorProto;
+import com.google.api.services.bigquery.model.Job;
+import com.google.api.services.bigquery.model.Table;
+import com.google.api.services.bigquery.model.TableCell;
+import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
+import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
+import com.google.api.services.bigquery.model.TableRow;
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.gcloud.Page;
+import com.google.gcloud.RetryParams;
+import com.google.gcloud.WriteChannel;
+import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert;
+import com.google.gcloud.spi.BigQueryRpc;
+import com.google.gcloud.spi.BigQueryRpc.Tuple;
+import com.google.gcloud.spi.BigQueryRpcFactory;
+
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.math.BigInteger;
+import java.util.List;
+import java.util.Map;
+
+public class BigQueryImplTest {
+
+  private static final String PROJECT = "project";
+  private static final String DATASET = "dataset";
+  private static final String TABLE = "table";
+  private static final String JOB = "job";
+  private static final String OTHER_TABLE = "otherTable";
+  private static final String OTHER_DATASET = "otherDataset";
+  private static final List<Acl> ACCESS_RULES = ImmutableList.of(
+      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
+      Acl.of(new Acl.View(TableId.of("dataset", "table")), Acl.Role.WRITER));
+  private static final List<Acl> ACCESS_RULES_WITH_PROJECT = ImmutableList.of(
+      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
+      Acl.of(new Acl.View(TableId.of(PROJECT, "dataset", "table"))));
+  private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET)
+      .acl(ACCESS_RULES)
+      .description("description")
+      .build();
+
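+  // Most fixtures below come in pairs: the value as a caller would supply it (without a project
+  // ID) and the same value with the project ID filled in, which is what the service layer is
+  // expected to hand to the RPC layer after applying the default project from BigQueryOptions.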
private static final DatasetInfo DATASET_INFO_WITH_PROJECT = DatasetInfo.builder(PROJECT, DATASET) + .acl(ACCESS_RULES_WITH_PROJECT) + .description("description") + .build(); + private static final DatasetInfo OTHER_DATASET_INFO = DatasetInfo.builder(PROJECT, OTHER_DATASET) + .acl(ACCESS_RULES) + .description("other description") + .build(); + private static final TableId TABLE_ID = TableId.of(DATASET, TABLE); + private static final TableId OTHER_TABLE_ID = TableId.of(PROJECT, DATASET, OTHER_TABLE); + private static final TableId TABLE_ID_WITH_PROJECT = TableId.of(PROJECT, DATASET, TABLE); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_SCHEMA); + private static final TableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_SCHEMA); + private static final TableInfo TABLE_INFO_WITH_PROJECT = + TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_SCHEMA); + private static final LoadJobConfiguration LOAD_JOB_CONFIGURATION = + LoadJobConfiguration.of(TABLE_ID, "URI"); + private static final LoadJobConfiguration LOAD_JOB_CONFIGURATION_WITH_PROJECT = + LoadJobConfiguration.of(TABLE_ID_WITH_PROJECT, "URI"); + private static final JobInfo LOAD_JOB = + JobInfo.of(LOAD_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_LOAD_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), LOAD_JOB_CONFIGURATION_WITH_PROJECT); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID, ImmutableList.of(TABLE_ID, TABLE_ID)); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION_WITH_PROJECT = + CopyJobConfiguration.of(TABLE_ID_WITH_PROJECT, ImmutableList.of(TABLE_ID_WITH_PROJECT, + TABLE_ID_WITH_PROJECT)); + private static final JobInfo COPY_JOB = JobInfo.of(COPY_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_COPY_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), COPY_JOB_CONFIGURATION_WITH_PROJECT); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = + QueryJobConfiguration.builder("SQL") + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(TABLE_ID) + .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_WITH_PROJECT = + QueryJobConfiguration.builder("SQL") + .defaultDataset(DatasetId.of(PROJECT, DATASET)) + .destinationTable(TABLE_ID_WITH_PROJECT) + .build(); + private static final JobInfo QUERY_JOB = JobInfo.of(QUERY_JOB_CONFIGURATION); + private static final JobInfo COMPLETE_QUERY_JOB = + JobInfo.of(JobId.of(PROJECT, JOB), QUERY_JOB_CONFIGURATION_WITH_PROJECT); + private static final ExtractJobConfiguration EXTRACT_JOB_CONFIGURATION = + ExtractJobConfiguration.of(TABLE_ID, "URI"); + private static final ExtractJobConfiguration EXTRACT_JOB_CONFIGURATION_WITH_PROJECT = + ExtractJobConfiguration.of(TABLE_ID_WITH_PROJECT, "URI"); + private static final JobInfo EXTRACT_JOB = JobInfo.of(EXTRACT_JOB_CONFIGURATION); + private static final 
JobInfo COMPLETE_EXTRACT_JOB =
+      JobInfo.of(JobId.of(PROJECT, JOB), EXTRACT_JOB_CONFIGURATION_WITH_PROJECT);
+  private static final TableCell BOOLEAN_FIELD = new TableCell().setV("false");
+  private static final TableCell INTEGER_FIELD = new TableCell().setV("1");
+  private static final TableRow TABLE_ROW =
+      new TableRow().setF(ImmutableList.of(BOOLEAN_FIELD, INTEGER_FIELD));
+  private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("SQL")
+      .maxResults(42L)
+      .useQueryCache(false)
+      .defaultDataset(DatasetId.of(DATASET))
+      .build();
+  private static final QueryRequest QUERY_REQUEST_WITH_PROJECT = QueryRequest.builder("SQL")
+      .maxResults(42L)
+      .useQueryCache(false)
+      .defaultDataset(DatasetId.of(PROJECT, DATASET))
+      .build();
+
+  // Empty BigQueryRpc options
+  private static final Map<BigQueryRpc.Option, ?> EMPTY_RPC_OPTIONS = ImmutableMap.of();
+
+  // Dataset options
+  private static final BigQuery.DatasetOption DATASET_OPTION_FIELDS =
+      BigQuery.DatasetOption.fields(BigQuery.DatasetField.ACCESS, BigQuery.DatasetField.ETAG);
+
+  // Dataset list options
+  private static final BigQuery.DatasetListOption DATASET_LIST_ALL =
+      BigQuery.DatasetListOption.all();
+  private static final BigQuery.DatasetListOption DATASET_LIST_PAGE_TOKEN =
+      BigQuery.DatasetListOption.startPageToken("cursor");
+  private static final BigQuery.DatasetListOption DATASET_LIST_MAX_RESULTS =
+      BigQuery.DatasetListOption.maxResults(42L);
+  private static final Map<BigQueryRpc.Option, ?> DATASET_LIST_OPTIONS = ImmutableMap.of(
+      BigQueryRpc.Option.ALL_DATASETS, true,
+      BigQueryRpc.Option.PAGE_TOKEN, "cursor",
+      BigQueryRpc.Option.MAX_RESULTS, 42L);
+
+  // Dataset delete options
+  private static final BigQuery.DatasetDeleteOption DATASET_DELETE_CONTENTS =
+      BigQuery.DatasetDeleteOption.deleteContents();
+  private static final Map<BigQueryRpc.Option, ?> DATASET_DELETE_OPTIONS = ImmutableMap.of(
+      BigQueryRpc.Option.DELETE_CONTENTS, true);
+
+  // Table options
+  private static final BigQuery.TableOption TABLE_OPTION_FIELDS =
+      BigQuery.TableOption.fields(BigQuery.TableField.SCHEMA, BigQuery.TableField.ETAG);
+
+  // Table list options
+  private static final BigQuery.TableListOption TABLE_LIST_MAX_RESULTS =
+      BigQuery.TableListOption.maxResults(42L);
+  private static final BigQuery.TableListOption TABLE_LIST_PAGE_TOKEN =
+      BigQuery.TableListOption.startPageToken("cursor");
+  private static final Map<BigQueryRpc.Option, ?> TABLE_LIST_OPTIONS = ImmutableMap.of(
+      BigQueryRpc.Option.MAX_RESULTS, 42L,
+      BigQueryRpc.Option.PAGE_TOKEN, "cursor");
+
+  // TableData list options
+  private static final BigQuery.TableDataListOption TABLE_DATA_LIST_MAX_RESULTS =
+      BigQuery.TableDataListOption.maxResults(42L);
+  private static final BigQuery.TableDataListOption TABLE_DATA_LIST_PAGE_TOKEN =
+      BigQuery.TableDataListOption.startPageToken("cursor");
+  private static final BigQuery.TableDataListOption TABLE_DATA_LIST_START_INDEX =
+      BigQuery.TableDataListOption.startIndex(0L);
+  private static final Map<BigQueryRpc.Option, ?> TABLE_DATA_LIST_OPTIONS = ImmutableMap.of(
+      BigQueryRpc.Option.MAX_RESULTS, 42L,
+      BigQueryRpc.Option.PAGE_TOKEN, "cursor",
+      BigQueryRpc.Option.START_INDEX, 0L);
+
+  // Job options
+  private static final BigQuery.JobOption JOB_OPTION_FIELDS =
+      BigQuery.JobOption.fields(BigQuery.JobField.USER_EMAIL);
+
+  // Job list options
+  private static final BigQuery.JobListOption JOB_LIST_OPTION_FIELD =
+      BigQuery.JobListOption.fields(BigQuery.JobField.STATISTICS);
+  private static final BigQuery.JobListOption JOB_LIST_ALL_USERS =
+      BigQuery.JobListOption.allUsers();
+  private static final BigQuery.JobListOption
JOB_LIST_STATE_FILTER =
+      BigQuery.JobListOption.stateFilter(JobStatus.State.DONE, JobStatus.State.PENDING);
+  private static final BigQuery.JobListOption JOB_LIST_PAGE_TOKEN =
+      BigQuery.JobListOption.startPageToken("cursor");
+  private static final BigQuery.JobListOption JOB_LIST_MAX_RESULTS =
+      BigQuery.JobListOption.maxResults(42L);
+  private static final Map<BigQueryRpc.Option, ?> JOB_LIST_OPTIONS = ImmutableMap.of(
+      BigQueryRpc.Option.ALL_USERS, true,
+      BigQueryRpc.Option.STATE_FILTER, ImmutableList.of("done", "pending"),
+      BigQueryRpc.Option.PAGE_TOKEN, "cursor",
+      BigQueryRpc.Option.MAX_RESULTS, 42L);
+
+  // Query Results options
+  private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_TIME =
+      BigQuery.QueryResultsOption.maxWaitTime(42L);
+  private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_INDEX =
+      BigQuery.QueryResultsOption.startIndex(1024L);
+  private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_PAGE_TOKEN =
+      BigQuery.QueryResultsOption.startPageToken("cursor");
+  private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_MAX_RESULTS =
+      BigQuery.QueryResultsOption.maxResults(0L);
+  private static final Map<BigQueryRpc.Option, ?> QUERY_RESULTS_OPTIONS = ImmutableMap.of(
+      BigQueryRpc.Option.TIMEOUT, 42L,
+      BigQueryRpc.Option.START_INDEX, 1024L,
+      BigQueryRpc.Option.PAGE_TOKEN, "cursor",
+      BigQueryRpc.Option.MAX_RESULTS, 0L);
+
+  private BigQueryOptions options;
+  private BigQueryRpcFactory rpcFactoryMock;
+  private BigQueryRpc bigqueryRpcMock;
+  private BigQuery bigquery;
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
+  @Before
+  public void setUp() {
+    rpcFactoryMock = EasyMock.createMock(BigQueryRpcFactory.class);
+    bigqueryRpcMock = EasyMock.createMock(BigQueryRpc.class);
+    EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(BigQueryOptions.class)))
+        .andReturn(bigqueryRpcMock);
+    EasyMock.replay(rpcFactoryMock);
+    options = BigQueryOptions.builder()
+        .projectId(PROJECT)
+        .serviceRpcFactory(rpcFactoryMock)
+        .retryParams(RetryParams.noRetries())
+        .build();
+  }
+
+  @After
+  public void tearDown() {
+    EasyMock.verify(rpcFactoryMock, bigqueryRpcMock);
+  }
+
+  @Test
+  public void testGetOptions() {
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    assertSame(options, bigquery.options());
+  }
+
+  @Test
+  public void testCreateDataset() {
+    EasyMock.expect(bigqueryRpcMock.create(DATASET_INFO_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS))
+        .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    DatasetInfo dataset = bigquery.create(DATASET_INFO);
+    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+  }
+
+  @Test
+  public void testCreateDatasetWithSelectedFields() {
+    Capture<Map<BigQueryRpc.Option, Object>> capturedOptions = Capture.newInstance();
+    EasyMock.expect(
+        bigqueryRpcMock.create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capture(capturedOptions)))
+        .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    DatasetInfo dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS);
+    String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption());
+    assertTrue(selector.contains("datasetReference"));
+    assertTrue(selector.contains("access"));
+    assertTrue(selector.contains("etag"));
+    assertEquals(28, selector.length());
+    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+  }
+
+  @Test
+  public void testGetDataset() {
+    EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS))
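+        // The stubbed RPC returns the wire-format protobuf; the assertions below verify that
+        // the service layer converts it back into a DatasetInfo carrying the project ID.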
.andReturn(DATASET_INFO_WITH_PROJECT.toPb());
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    DatasetInfo dataset = bigquery.getDataset(DATASET);
+    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+  }
+
+  @Test
+  public void testGetDatasetFromDatasetId() {
+    EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS))
+        .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    DatasetInfo dataset = bigquery.getDataset(DatasetId.of(PROJECT, DATASET));
+    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+  }
+
+  @Test
+  public void testGetDatasetWithSelectedFields() {
+    Capture<Map<BigQueryRpc.Option, Object>> capturedOptions = Capture.newInstance();
+    EasyMock.expect(bigqueryRpcMock.getDataset(eq(DATASET), capture(capturedOptions)))
+        .andReturn(DATASET_INFO_WITH_PROJECT.toPb());
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    DatasetInfo dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS);
+    String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption());
+    assertTrue(selector.contains("datasetReference"));
+    assertTrue(selector.contains("access"));
+    assertTrue(selector.contains("etag"));
+    assertEquals(28, selector.length());
+    assertEquals(DATASET_INFO_WITH_PROJECT, dataset);
+  }
+
+  @Test
+  public void testListDatasets() {
+    String cursor = "cursor";
+    ImmutableList<DatasetInfo> datasetList = ImmutableList.of(DATASET_INFO_WITH_PROJECT,
+        OTHER_DATASET_INFO);
+    Tuple<String, Iterable<Dataset>> result =
+        Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION));
+    EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result);
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    Page<DatasetInfo> page = bigquery.listDatasets();
+    assertEquals(cursor, page.nextPageCursor());
+    assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class));
+  }
+
+  @Test
+  public void testListEmptyDatasets() {
+    ImmutableList<Dataset> datasets = ImmutableList.of();
+    Tuple<String, Iterable<Dataset>> result = Tuple.<String, Iterable<Dataset>>of(null, datasets);
+    EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result);
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    Page<DatasetInfo> page = bigquery.listDatasets();
+    assertNull(page.nextPageCursor());
+    assertArrayEquals(ImmutableList.of().toArray(),
+        Iterables.toArray(page.values(), DatasetInfo.class));
+  }
+
+  @Test
+  public void testListDatasetsWithOptions() {
+    String cursor = "cursor";
+    ImmutableList<DatasetInfo> datasetList = ImmutableList.of(DATASET_INFO_WITH_PROJECT,
+        OTHER_DATASET_INFO);
+    Tuple<String, Iterable<Dataset>> result =
+        Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION));
+    EasyMock.expect(bigqueryRpcMock.listDatasets(DATASET_LIST_OPTIONS)).andReturn(result);
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    Page<DatasetInfo> page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN,
+        DATASET_LIST_MAX_RESULTS);
+    assertEquals(cursor, page.nextPageCursor());
+    assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class));
+  }
+
+  @Test
+  public void testDeleteDataset() {
+    EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, EMPTY_RPC_OPTIONS)).andReturn(true);
+    EasyMock.replay(bigqueryRpcMock);
+    bigquery = options.service();
+    assertTrue(bigquery.delete(DATASET));
+  }
+
+  @Test
+  public void testDeleteDatasetFromDatasetId() {
+    EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, EMPTY_RPC_OPTIONS)).andReturn(true);
+    EasyMock.replay(bigqueryRpcMock);
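+    // Record/replay/verify lifecycle used throughout this class: expectations are recorded in
+    // each test, replay() arms the mock, and tearDown() calls verify() on every mock, so a test
+    // fails if the service layer skips or repeats an RPC call.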
bigquery = options.service(); + assertTrue(bigquery.delete(DatasetId.of(PROJECT, DATASET))); + } + + @Test + public void testDeleteDatasetWithOptions() { + EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, DATASET_DELETE_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET, DATASET_DELETE_CONTENTS)); + } + + @Test + public void testUpdateDataset() { + DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().description("newDescription").build(); + DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(updatedDatasetInfoWithProject.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedDatasetInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.update(updatedDatasetInfo); + assertEquals(updatedDatasetInfoWithProject, dataset); + } + + @Test + public void testUpdateDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().description("newDescription").build(); + DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect( + bigqueryRpcMock.patch(eq(updatedDatasetInfoWithProject.toPb()), capture(capturedOptions))) + .andReturn(updatedDatasetInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(updatedDatasetInfoWithProject, dataset); + } + + @Test + public void testCreateTable() { + EasyMock.expect(bigqueryRpcMock.create(TABLE_INFO_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.create(TABLE_INFO); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testCreateTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect( + bigqueryRpcMock.create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capture(capturedOptions))) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testGetTable() { + EasyMock.expect(bigqueryRpcMock.getTable(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.getTable(DATASET, TABLE); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testGetTableFromTableId() { + EasyMock.expect(bigqueryRpcMock.getTable(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + 
.andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.getTable(TABLE_ID); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testGetTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.getTable(eq(DATASET), eq(TABLE), capture(capturedOptions))) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testListTables() { + String cursor = "cursor"; + ImmutableList tableList = + ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listTables(DATASET); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), BaseTableInfo.class)); + } + + @Test + public void testListTablesFromDatasetId() { + String cursor = "cursor"; + ImmutableList tableList = + ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listTables(DatasetId.of(PROJECT, DATASET)); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), BaseTableInfo.class)); + } + + @Test + public void testListTablesWithOptions() { + String cursor = "cursor"; + ImmutableList tableList = + ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, TABLE_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listTables(DATASET, TABLE_LIST_MAX_RESULTS, + TABLE_LIST_PAGE_TOKEN); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), BaseTableInfo.class)); + } + + @Test + public void testDeleteTable() { + EasyMock.expect(bigqueryRpcMock.deleteTable(DATASET, TABLE)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET, TABLE)); + } + + @Test + public void testDeleteTableFromTableId() { + EasyMock.expect(bigqueryRpcMock.deleteTable(DATASET, TABLE)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(TABLE_ID)); + } + + @Test + public void testUpdateTable() { + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build(); + TableInfo updatedTableInfoWithProject = 
TABLE_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(updatedTableInfoWithProject.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedTableInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.update(updatedTableInfo); + assertEquals(updatedTableInfoWithProject, table); + } + + @Test + public void testUpdateTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build(); + TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()), + capture(capturedOptions))).andReturn(updatedTableInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(updatedTableInfoWithProject, table); + } + + @Test + public void testInsertAll() { + Map row1 = ImmutableMap.of("field", "value1"); + Map row2 = ImmutableMap.of("field", "value2"); + List rows = ImmutableList.of( + new RowToInsert("row1", row1), + new RowToInsert("row2", row2) + ); + InsertAllRequest request = InsertAllRequest.builder(TABLE_ID) + .rows(rows) + .skipInvalidRows(false) + .ignoreUnknownValues(true) + .templateSuffix("suffix") + .build(); + TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest().setRows( + Lists.transform(rows, new Function() { + @Override + public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { + return new TableDataInsertAllRequest.Rows().setInsertId(rowToInsert.id()) + .setJson(rowToInsert.content()); + } + }) + ).setSkipInvalidRows(false).setIgnoreUnknownValues(true).setTemplateSuffix("suffix"); + TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse().setInsertErrors( + ImmutableList.of(new TableDataInsertAllResponse.InsertErrors().setIndex(0L).setErrors( + ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); + EasyMock.expect(bigqueryRpcMock.insertAll(DATASET, TABLE, requestPb)) + .andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + InsertAllResponse response = bigquery.insertAll(request); + assertNotNull(response.errorsFor(0L)); + assertNull(response.errorsFor(1L)); + assertEquals(1, response.errorsFor(0L).size()); + assertEquals("ErrorMessage", response.errorsFor(0L).get(0).message()); + } + + @Test + public void testListTableData() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + 
.setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(DATASET, TABLE); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testListTableDataFromTableId() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(TableId.of(DATASET, TABLE)); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testListTableDataWithOptions() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(DATASET, TABLE, + TABLE_DATA_LIST_MAX_RESULTS, TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testCreateQueryJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_QUERY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.create(QUERY_JOB); + assertEquals(COMPLETE_QUERY_JOB, job); + } + + @Test + public void testCreateLoadJob() { + 
EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(LOAD_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_LOAD_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.create(LOAD_JOB); + assertEquals(COMPLETE_LOAD_JOB, job); + } + + @Test + public void testCreateCopyJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(COPY_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.create(COPY_JOB); + assertEquals(COMPLETE_COPY_JOB, job); + } + + @Test + public void testCreateExtractJob() { + EasyMock.expect(bigqueryRpcMock.create( + JobInfo.of(EXTRACT_JOB_CONFIGURATION_WITH_PROJECT).toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_EXTRACT_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.create(EXTRACT_JOB); + assertEquals(COMPLETE_EXTRACT_JOB, job); + } + + @Test + public void testCreateJobWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.create( + eq(JobInfo.of(QUERY_JOB_CONFIGURATION_WITH_PROJECT).toPb()), capture(capturedOptions))) + .andReturn(COMPLETE_QUERY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS); + assertEquals(COMPLETE_QUERY_JOB, job); + String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("jobReference")); + assertTrue(selector.contains("configuration")); + assertTrue(selector.contains("user_email")); + assertEquals(37, selector.length()); + } + + @Test + public void testGetJob() { + EasyMock.expect(bigqueryRpcMock.getJob(JOB, EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.getJob(JOB); + assertEquals(COMPLETE_COPY_JOB, job); + } + + @Test + public void testGetJobFromJobId() { + EasyMock.expect(bigqueryRpcMock.getJob(JOB, EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + JobInfo job = bigquery.getJob(JobId.of(PROJECT, JOB)); + assertEquals(COMPLETE_COPY_JOB, job); + } + + @Test + public void testListJobs() { + String cursor = "cursor"; + ImmutableList jobList = ImmutableList.of(COMPLETE_QUERY_JOB, COMPLETE_LOAD_JOB); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, new Function() { + @Override + public Job apply(JobInfo jobInfo) { + return jobInfo.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listJobs(); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + } + + @Test + public void testListJobsWithOptions() { + String cursor = "cursor"; + ImmutableList jobList = ImmutableList.of(COMPLETE_QUERY_JOB, COMPLETE_LOAD_JOB); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, new Function() { + @Override + public Job apply(JobInfo jobInfo) { + return jobInfo.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(JOB_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = 
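+        // JOB_LIST_OPTIONS above shows the expected translation: the DONE/PENDING enum filter
+        // reaches the RPC layer as the lowercase strings "done" and "pending".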
bigquery.listJobs(JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, + JOB_LIST_PAGE_TOKEN, JOB_LIST_MAX_RESULTS); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + } + + @Test + public void testListJobsWithSelectedFields() { + String cursor = "cursor"; + Capture> capturedOptions = Capture.newInstance(); + ImmutableList jobList = ImmutableList.of(COMPLETE_QUERY_JOB, COMPLETE_LOAD_JOB); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, new Function() { + @Override + public Job apply(JobInfo jobInfo) { + return jobInfo.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(capture(capturedOptions))).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("etag,jobs(")); + assertTrue(selector.contains("configuration")); + assertTrue(selector.contains("jobReference")); + assertTrue(selector.contains("statistics")); + assertTrue(selector.contains("state,errorResult),nextPageToken")); + assertEquals(80, selector.length()); + } + + @Test + public void testCancelJob() { + EasyMock.expect(bigqueryRpcMock.cancel(JOB)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.cancel(JOB)); + } + + @Test + public void testCancelJobFromJobId() { + EasyMock.expect(bigqueryRpcMock.cancel(JOB)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.cancel(JobId.of(PROJECT, JOB))); + } + + @Test + public void testQueryRequest() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setJobReference(queryJob.toPb()) + .setJobComplete(false); + EasyMock.expect(bigqueryRpcMock.query(QUERY_REQUEST_WITH_PROJECT.toPb())).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.query(QUERY_REQUEST); + assertNull(response.etag()); + assertNull(response.result()); + assertEquals(queryJob, response.jobId()); + assertEquals(false, response.jobCompleted()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result()); + } + + @Test + public void testQueryRequestCompleted() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.query(QUERY_REQUEST_WITH_PROJECT.toPb())).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.query(QUERY_REQUEST); + assertNull(response.etag()); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobCompleted()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), 
response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testGetQueryResults() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.GetQueryResultsResponse responsePb = + new com.google.api.services.bigquery.model.GetQueryResultsResponse() + .setEtag("etag") + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.getQueryResults(JOB, EMPTY_RPC_OPTIONS)).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.getQueryResults(queryJob); + assertEquals("etag", response.etag()); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobCompleted()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testGetQueryResultsWithOptions() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.GetQueryResultsResponse responsePb = + new com.google.api.services.bigquery.model.GetQueryResultsResponse() + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.getQueryResults(JOB, QUERY_RESULTS_OPTIONS)) + .andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.getQueryResults(queryJob, QUERY_RESULTS_OPTION_TIME, + QUERY_RESULTS_OPTION_INDEX, QUERY_RESULTS_OPTION_MAX_RESULTS, + QUERY_RESULTS_OPTION_PAGE_TOKEN); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobCompleted()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testWriter() { + WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.of(TABLE_ID); + EasyMock.expect( + bigqueryRpcMock.open(WriteChannelConfiguration.of(TABLE_ID_WITH_PROJECT).toPb())) + .andReturn("upload-id"); + 
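+    // writer() builds on the resumable-upload pair in DefaultBigQueryRpc: open() POSTs the
+    // configuration with uploadType=resumable and returns the upload URI (stubbed here as
+    // "upload-id"), and the WriteChannel then streams chunks through write(), which PUTs each
+    // chunk with a Content-Range header and expects HTTP 308 until the final chunk.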
EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + WriteChannel channel = bigquery.writer(writeChannelConfiguration); + assertNotNull(channel); + assertTrue(channel.isOpen()); + } + + @Test + public void testRetryableException() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new BigQueryException(500, "InternalError")) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + DatasetInfo dataset = bigquery.getDataset(DATASET); + assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testNonRetryableException() { + String exceptionMessage = "Not Implemented"; + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new BigQueryException(501, exceptionMessage)); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + thrown.expect(BigQueryException.class); + thrown.expectMessage(exceptionMessage); + bigquery.getDataset(DatasetId.of(DATASET)); + } + + @Test + public void testRuntimeException() { + String exceptionMessage = "Artificial runtime exception"; + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new RuntimeException(exceptionMessage)); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + thrown.expect(BigQueryException.class); + thrown.expectMessage(exceptionMessage); + bigquery.getDataset(DATASET); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java new file mode 100644 index 000000000000..3f3f6f0fd15c --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobConfigurationTest.java @@ -0,0 +1,130 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import com.google.common.collect.ImmutableList;
+import com.google.gcloud.bigquery.JobInfo.CreateDisposition;
+import com.google.gcloud.bigquery.JobInfo.WriteDisposition;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class CopyJobConfigurationTest {
+
+  private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable");
+  private static final List<TableId> SOURCE_TABLES = ImmutableList.of(
+      TableId.of("dataset", "sourceTable1"),
+      TableId.of("dataset", "sourceTable2"));
+  private static final TableId DESTINATION_TABLE = TableId.of("dataset", "destinationTable");
+  private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED;
+  private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND;
+  private static final CopyJobConfiguration COPY_JOB_CONFIGURATION =
+      CopyJobConfiguration.builder(DESTINATION_TABLE, SOURCE_TABLE)
+          .createDisposition(CREATE_DISPOSITION)
+          .writeDisposition(WRITE_DISPOSITION)
+          .build();
+  private static final CopyJobConfiguration COPY_JOB_CONFIGURATION_MULTIPLE_TABLES =
+      CopyJobConfiguration.builder(DESTINATION_TABLE, SOURCE_TABLES)
+          .createDisposition(CREATE_DISPOSITION)
+          .writeDisposition(WRITE_DISPOSITION)
+          .build();
+
+  @Test
+  public void testToBuilder() {
+    compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, COPY_JOB_CONFIGURATION.toBuilder().build());
+    compareCopyJobConfiguration(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES,
+        COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder().build());
+    CopyJobConfiguration jobConfiguration = COPY_JOB_CONFIGURATION.toBuilder()
+        .destinationTable(TableId.of("dataset", "newTable"))
+        .build();
+    assertEquals("newTable", jobConfiguration.destinationTable().table());
+    jobConfiguration = jobConfiguration.toBuilder().destinationTable(DESTINATION_TABLE).build();
+    compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, jobConfiguration);
+  }
+
+  @Test
+  public void testOf() {
+    CopyJobConfiguration job = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES);
+    assertEquals(DESTINATION_TABLE, job.destinationTable());
+    assertEquals(SOURCE_TABLES, job.sourceTables());
+    job = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLE);
+    assertEquals(DESTINATION_TABLE, job.destinationTable());
+    assertEquals(ImmutableList.of(SOURCE_TABLE), job.sourceTables());
+  }
+
+  @Test
+  public void testToBuilderIncomplete() {
+    CopyJobConfiguration jobConfiguration =
+        CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES);
+    compareCopyJobConfiguration(jobConfiguration, jobConfiguration.toBuilder().build());
+  }
+
+  @Test
+  public void testBuilder() {
+    assertEquals(DESTINATION_TABLE, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.destinationTable());
+    assertEquals(SOURCE_TABLES, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.sourceTables());
+    assertEquals(CREATE_DISPOSITION, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.createDisposition());
+    assertEquals(WRITE_DISPOSITION, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.writeDisposition());
+    assertEquals(DESTINATION_TABLE, COPY_JOB_CONFIGURATION.destinationTable());
+    assertEquals(ImmutableList.of(SOURCE_TABLE), COPY_JOB_CONFIGURATION.sourceTables());
+    assertEquals(CREATE_DISPOSITION, COPY_JOB_CONFIGURATION.createDisposition());
+    assertEquals(WRITE_DISPOSITION, COPY_JOB_CONFIGURATION.writeDisposition());
+  }
+
+  @Test
+  public void testToPbAndFromPb() {
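+    // Only the copy configuration should be populated on the protobuf; sibling job types stay null.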
+ assertNotNull(COPY_JOB_CONFIGURATION.toPb().getCopy()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getExtract()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getLoad()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getQuery()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getCopy().getSourceTables()); + assertNull(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toPb().getCopy().getSourceTable()); + compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, + CopyJobConfiguration.fromPb(COPY_JOB_CONFIGURATION.toPb())); + compareCopyJobConfiguration(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES, + CopyJobConfiguration.fromPb(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toPb())); + CopyJobConfiguration jobConfiguration = + CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); + compareCopyJobConfiguration( + jobConfiguration, CopyJobConfiguration.fromPb(jobConfiguration.toPb())); + } + + @Test + public void testSetProjectId() { + CopyJobConfiguration configuration = COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.setProjectId("p"); + assertEquals("p", configuration.destinationTable().project()); + for (TableId sourceTable : configuration.sourceTables()) { + assertEquals("p", sourceTable.project()); + } + } + + private void compareCopyJobConfiguration(CopyJobConfiguration expected, + CopyJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.sourceTables(), value.sourceTables()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java new file mode 100644 index 000000000000..371202174431 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java @@ -0,0 +1,87 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; + +public class CsvOptionsTest { + + private static final Boolean ALLOW_JAGGED_ROWS = true; + private static final Boolean ALLOW_QUOTED_NEWLINE = true; + private static final Charset ENCODING = StandardCharsets.UTF_8; + private static final String FIELD_DELIMITER = ","; + private static final String QUOTE = "\""; + private static final Integer SKIP_LEADING_ROWS = 42; + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(ALLOW_JAGGED_ROWS) + .allowQuotedNewLines(ALLOW_QUOTED_NEWLINE) + .encoding(ENCODING) + .fieldDelimiter(FIELD_DELIMITER) + .quote(QUOTE) + .skipLeadingRows(SKIP_LEADING_ROWS) + .build(); + + @Test + public void testToBuilder() { + compareCsvOptions(CSV_OPTIONS, CSV_OPTIONS.toBuilder().build()); + CsvOptions csvOptions = CSV_OPTIONS.toBuilder() + .fieldDelimiter(";") + .build(); + assertEquals(";", csvOptions.fieldDelimiter()); + csvOptions = csvOptions.toBuilder().fieldDelimiter(",").build(); + compareCsvOptions(CSV_OPTIONS, csvOptions); + } + + @Test + public void testToBuilderIncomplete() { + CsvOptions csvOptions = CsvOptions.builder().fieldDelimiter("|").build(); + assertEquals(csvOptions, csvOptions.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FormatOptions.CSV, CSV_OPTIONS.type()); + assertEquals(ALLOW_JAGGED_ROWS, CSV_OPTIONS.allowJaggedRows()); + assertEquals(ALLOW_QUOTED_NEWLINE, CSV_OPTIONS.allowQuotedNewLines()); + assertEquals(ENCODING.name(), CSV_OPTIONS.encoding()); + assertEquals(FIELD_DELIMITER, CSV_OPTIONS.fieldDelimiter()); + assertEquals(QUOTE, CSV_OPTIONS.quote()); + assertEquals(SKIP_LEADING_ROWS, CSV_OPTIONS.skipLeadingRows()); + } + + @Test + public void testToAndFromPb() { + compareCsvOptions(CSV_OPTIONS, CsvOptions.fromPb(CSV_OPTIONS.toPb())); + CsvOptions csvOptions = CsvOptions.builder().allowJaggedRows(ALLOW_JAGGED_ROWS).build(); + compareCsvOptions(csvOptions, CsvOptions.fromPb(csvOptions.toPb())); + } + + private void compareCsvOptions(CsvOptions expected, CsvOptions value) { + assertEquals(expected, value); + assertEquals(expected.allowJaggedRows(), value.allowJaggedRows()); + assertEquals(expected.allowQuotedNewLines(), value.allowQuotedNewLines()); + assertEquals(expected.encoding(), value.encoding()); + assertEquals(expected.fieldDelimiter(), value.fieldDelimiter()); + assertEquals(expected.quote(), value.quote()); + assertEquals(expected.skipLeadingRows(), value.skipLeadingRows()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java new file mode 100644 index 000000000000..ec645d71c96f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetIdTest.java @@ -0,0 +1,59 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class DatasetIdTest { + + private static final DatasetId DATASET = DatasetId.of("dataset"); + private static final DatasetId DATASET_COMPLETE = DatasetId.of("project", "dataset"); + + @Test + public void testOf() { + assertEquals(null, DATASET.project()); + assertEquals("dataset", DATASET.dataset()); + assertEquals("project", DATASET_COMPLETE.project()); + assertEquals("dataset", DATASET_COMPLETE.dataset()); + } + + @Test + public void testEquals() { + compareDatasetIds(DATASET, DatasetId.of("dataset")); + compareDatasetIds(DATASET_COMPLETE, DatasetId.of("project", "dataset")); + } + + @Test + public void testToPbAndFromPb() { + compareDatasetIds(DATASET, DatasetId.fromPb(DATASET.toPb())); + compareDatasetIds(DATASET_COMPLETE, DatasetId.fromPb(DATASET_COMPLETE.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals(DATASET_COMPLETE, DATASET.setProjectId("project")); + } + + private void compareDatasetIds(DatasetId expected, DatasetId value) { + assertEquals(expected, value); + assertEquals(expected.project(), value.project()); + assertEquals(expected.dataset(), value.dataset()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java new file mode 100644 index 000000000000..20875c0fc853 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java @@ -0,0 +1,136 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class DatasetInfoTest {
+
+  private static final List<Acl> ACCESS_RULES = ImmutableList.of(
+      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
+      Acl.of(new Acl.View(TableId.of("dataset", "table"))));
+  private static final List<Acl> ACCESS_RULES_COMPLETE = ImmutableList.of(
+      Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
+      Acl.of(new Acl.View(TableId.of("project", "dataset", "table"))));
+  private static final Long CREATION_TIME = System.currentTimeMillis();
+  private static final Long DEFAULT_TABLE_EXPIRATION = CREATION_TIME + 100;
+  private static final String DESCRIPTION = "description";
+  private static final String ETAG = "0xFF00";
+  private static final String FRIENDLY_NAME = "friendlyDataset";
+  private static final String ID = "P/D:1";
+  private static final Long LAST_MODIFIED = CREATION_TIME + 50;
+  private static final String LOCATION = "";
+  private static final String SELF_LINK = "http://bigquery/p/d";
+  private static final DatasetId DATASET_ID = DatasetId.of("dataset");
+  private static final DatasetId DATASET_ID_COMPLETE = DatasetId.of("project", "dataset");
+  private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID)
+      .acl(ACCESS_RULES)
+      .creationTime(CREATION_TIME)
+      .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION)
+      .description(DESCRIPTION)
+      .etag(ETAG)
+      .friendlyName(FRIENDLY_NAME)
+      .id(ID)
+      .lastModified(LAST_MODIFIED)
+      .location(LOCATION)
+      .selfLink(SELF_LINK)
+      .build();
+  private static final DatasetInfo DATASET_INFO_COMPLETE = DATASET_INFO.toBuilder()
+      .datasetId(DATASET_ID_COMPLETE)
+      .acl(ACCESS_RULES_COMPLETE)
+      .build();
+
+  @Test
+  public void testToBuilder() {
+    compareDatasets(DATASET_INFO, DATASET_INFO.toBuilder().build());
+    DatasetInfo datasetInfo = DATASET_INFO.toBuilder()
+        .datasetId(DatasetId.of("dataset2"))
+        .description("description2")
+        .build();
+    assertEquals(DatasetId.of("dataset2"), datasetInfo.datasetId());
+    assertEquals("description2", datasetInfo.description());
+    datasetInfo = datasetInfo.toBuilder().datasetId(DATASET_ID).description("description").build();
+    compareDatasets(DATASET_INFO, datasetInfo);
+  }
+
+  @Test
+  public void testToBuilderIncomplete() {
+    DatasetInfo datasetInfo = DatasetInfo.builder(DATASET_ID).build();
+    assertEquals(datasetInfo, datasetInfo.toBuilder().build());
+  }
+
+  @Test
+  public void testBuilder() {
+    assertNull(DATASET_INFO.datasetId().project());
+    assertEquals(DATASET_ID, DATASET_INFO.datasetId());
+    assertEquals(ACCESS_RULES, DATASET_INFO.acl());
+    assertEquals(CREATION_TIME, DATASET_INFO.creationTime());
+    assertEquals(DEFAULT_TABLE_EXPIRATION, DATASET_INFO.defaultTableLifetime());
+    assertEquals(DESCRIPTION, DATASET_INFO.description());
+    assertEquals(ETAG, DATASET_INFO.etag());
+    assertEquals(FRIENDLY_NAME, DATASET_INFO.friendlyName());
+    assertEquals(ID, DATASET_INFO.id());
+    assertEquals(LAST_MODIFIED, DATASET_INFO.lastModified());
+    assertEquals(LOCATION, DATASET_INFO.location());
+    assertEquals(SELF_LINK, DATASET_INFO.selfLink());
+    assertEquals(DATASET_ID_COMPLETE, DATASET_INFO_COMPLETE.datasetId());
+    assertEquals(ACCESS_RULES_COMPLETE, DATASET_INFO_COMPLETE.acl());
+    assertEquals(CREATION_TIME, DATASET_INFO_COMPLETE.creationTime());
+    assertEquals(DEFAULT_TABLE_EXPIRATION,
DATASET_INFO_COMPLETE.defaultTableLifetime()); + assertEquals(DESCRIPTION, DATASET_INFO_COMPLETE.description()); + assertEquals(ETAG, DATASET_INFO_COMPLETE.etag()); + assertEquals(FRIENDLY_NAME, DATASET_INFO_COMPLETE.friendlyName()); + assertEquals(ID, DATASET_INFO_COMPLETE.id()); + assertEquals(LAST_MODIFIED, DATASET_INFO_COMPLETE.lastModified()); + assertEquals(LOCATION, DATASET_INFO_COMPLETE.location()); + assertEquals(SELF_LINK, DATASET_INFO_COMPLETE.selfLink()); + } + + @Test + public void testToPbAndFromPb() { + compareDatasets(DATASET_INFO_COMPLETE, DatasetInfo.fromPb(DATASET_INFO_COMPLETE.toPb())); + DatasetInfo datasetInfo = DatasetInfo.builder("project", "dataset").build(); + compareDatasets(datasetInfo, DatasetInfo.fromPb(datasetInfo.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals(DATASET_INFO_COMPLETE, DATASET_INFO.setProjectId("project")); + } + + private void compareDatasets(DatasetInfo expected, DatasetInfo value) { + assertEquals(expected, value); + assertEquals(expected.datasetId(), value.datasetId()); + assertEquals(expected.description(), value.description()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.friendlyName(), value.friendlyName()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.location(), value.location()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.acl(), value.acl()); + assertEquals(expected.creationTime(), value.creationTime()); + assertEquals(expected.defaultTableLifetime(), value.defaultTableLifetime()); + assertEquals(expected.lastModified(), value.lastModified()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java new file mode 100644 index 000000000000..544fc2378b23 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetTest.java @@ -0,0 +1,332 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableList;
+import com.google.gcloud.Page;
+import com.google.gcloud.PageImpl;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.util.Iterator;
+import java.util.List;
+
+public class DatasetTest {
+
+  private static final DatasetId DATASET_ID = DatasetId.of("dataset");
+  private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID).build();
+  private static final Field FIELD = Field.of("FieldName", Field.Type.integer());
+  private static final Iterable<BaseTableInfo> TABLE_INFO_RESULTS = ImmutableList.of(
+      TableInfo.builder(TableId.of("dataset", "table1"), Schema.of(FIELD)).build(),
+      ViewInfo.builder(TableId.of("dataset", "table2"), "QUERY").build(),
+      ExternalTableInfo.builder(TableId.of("dataset", "table2"),
+          ExternalDataConfiguration.of(ImmutableList.of("URI"), Schema.of(), FormatOptions.csv()))
+          .build());
+  private static final UserDefinedFunction FUNCTION1 = UserDefinedFunction.inline("inline");
+  private static final UserDefinedFunction FUNCTION2 = UserDefinedFunction.inline("gs://b/f");
+  private static final List<UserDefinedFunction> FUNCTIONS = ImmutableList.of(FUNCTION1, FUNCTION2);
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+  private BigQuery bigquery;
+  private Dataset dataset;
+
+  @Before
+  public void setUp() throws Exception {
+    bigquery = createStrictMock(BigQuery.class);
+    dataset = new Dataset(bigquery, DATASET_INFO);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    verify(bigquery);
+  }
+
+  @Test
+  public void testInfo() throws Exception {
+    assertEquals(DATASET_INFO, dataset.info());
+    replay(bigquery);
+  }
+
+  @Test
+  public void testBigQuery() throws Exception {
+    assertSame(bigquery, dataset.bigquery());
+    replay(bigquery);
+  }
+
+  @Test
+  public void testExists_True() throws Exception {
+    BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()};
+    expect(bigquery.getDataset(DATASET_ID, expectedOptions)).andReturn(DATASET_INFO);
+    replay(bigquery);
+    assertTrue(dataset.exists());
+  }
+
+  @Test
+  public void testExists_False() throws Exception {
+    BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()};
+    expect(bigquery.getDataset(DATASET_ID, expectedOptions)).andReturn(null);
+    replay(bigquery);
+    assertFalse(dataset.exists());
+  }
+
+  @Test
+  public void testReload() throws Exception {
+    DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build();
+    expect(bigquery.getDataset(DATASET_ID.dataset())).andReturn(updatedInfo);
+    replay(bigquery);
+    Dataset updatedDataset = dataset.reload();
+    assertSame(bigquery, updatedDataset.bigquery());
+    assertEquals(updatedInfo, updatedDataset.info());
+  }
+
+  @Test
+  public void testReloadNull() throws Exception {
+    expect(bigquery.getDataset(DATASET_ID.dataset())).andReturn(null);
+    replay(bigquery);
+    assertNull(dataset.reload());
+  }
+
+  @Test
+  public void testReloadWithOptions() throws Exception {
+    DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build();
+    expect(bigquery.getDataset(DATASET_ID.dataset(), BigQuery.DatasetOption.fields()))
+        .andReturn(updatedInfo);
+    replay(bigquery);
+    Dataset updatedDataset = dataset.reload(BigQuery.DatasetOption.fields());
+    assertSame(bigquery, updatedDataset.bigquery());
+    assertEquals(updatedInfo, updatedDataset.info());
+  }
+
+  @Test
+  public void testUpdate() throws Exception {
+    DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build();
+    expect(bigquery.update(updatedInfo)).andReturn(updatedInfo);
+    replay(bigquery);
+    Dataset updatedDataset = dataset.update(updatedInfo);
+    assertSame(bigquery, updatedDataset.bigquery());
+    assertEquals(updatedInfo, updatedDataset.info());
+  }
+
+  @Test
+  public void testUpdateWithDifferentId() throws Exception {
+    DatasetInfo updatedInfo = DATASET_INFO.toBuilder()
+        .datasetId(DatasetId.of("dataset2"))
+        .description("Description")
+        .build();
+    replay(bigquery);
+    thrown.expect(IllegalArgumentException.class);
+    dataset.update(updatedInfo);
+  }
+
+  @Test
+  public void testUpdateWithOptions() throws Exception {
+    DatasetInfo updatedInfo = DATASET_INFO.toBuilder().description("Description").build();
+    expect(bigquery.update(updatedInfo, BigQuery.DatasetOption.fields())).andReturn(updatedInfo);
+    replay(bigquery);
+    Dataset updatedDataset = dataset.update(updatedInfo, BigQuery.DatasetOption.fields());
+    assertSame(bigquery, updatedDataset.bigquery());
+    assertEquals(updatedInfo, updatedDataset.info());
+  }
+
+  @Test
+  public void testDelete() throws Exception {
+    expect(bigquery.delete(DATASET_INFO.datasetId())).andReturn(true);
+    replay(bigquery);
+    assertTrue(dataset.delete());
+  }
+
+  @Test
+  public void testList() throws Exception {
+    BigQueryOptions bigqueryOptions = createStrictMock(BigQueryOptions.class);
+    PageImpl<BaseTableInfo> tableInfoPage = new PageImpl<>(null, "c", TABLE_INFO_RESULTS);
+    expect(bigquery.listTables(DATASET_INFO.datasetId())).andReturn(tableInfoPage);
+    expect(bigquery.options()).andReturn(bigqueryOptions);
+    expect(bigqueryOptions.service()).andReturn(bigquery);
+    replay(bigquery, bigqueryOptions);
+    Page<Table> tablePage = dataset.list();
+    Iterator<BaseTableInfo> tableInfoIterator = tableInfoPage.values().iterator();
+    Iterator<Table> tableIterator = tablePage.values().iterator();
+    while (tableInfoIterator.hasNext() && tableIterator.hasNext()) {
+      assertEquals(tableInfoIterator.next(), tableIterator.next().info());
+    }
+    assertFalse(tableInfoIterator.hasNext());
+    assertFalse(tableIterator.hasNext());
+    assertEquals(tableInfoPage.nextPageCursor(), tablePage.nextPageCursor());
+    verify(bigqueryOptions);
+  }
+
+  @Test
+  public void testListWithOptions() throws Exception {
+    BigQueryOptions bigqueryOptions = createStrictMock(BigQueryOptions.class);
+    PageImpl<BaseTableInfo> tableInfoPage = new PageImpl<>(null, "c", TABLE_INFO_RESULTS);
+    expect(bigquery.listTables(DATASET_INFO.datasetId(), BigQuery.TableListOption.maxResults(10L)))
+        .andReturn(tableInfoPage);
+    expect(bigquery.options()).andReturn(bigqueryOptions);
+    expect(bigqueryOptions.service()).andReturn(bigquery);
+    replay(bigquery, bigqueryOptions);
+    Page<Table> tablePage = dataset.list(BigQuery.TableListOption.maxResults(10L));
+    Iterator<BaseTableInfo> tableInfoIterator = tableInfoPage.values().iterator();
+    Iterator<Table> tableIterator = tablePage.values().iterator();
+    while (tableInfoIterator.hasNext() && tableIterator.hasNext()) {
+      assertEquals(tableInfoIterator.next(), tableIterator.next().info());
+    }
+    assertFalse(tableInfoIterator.hasNext());
+    assertFalse(tableIterator.hasNext());
+    assertEquals(tableInfoPage.nextPageCursor(), tablePage.nextPageCursor());
+    verify(bigqueryOptions);
+  }
+
+  @Test
+  public void testGet() throws Exception {
+    BaseTableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), Schema.of()).build();
+    expect(bigquery.getTable(TableId.of("dataset", "table1"))).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.get("table1");
+    assertNotNull(table);
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testGetNull() throws Exception {
+    expect(bigquery.getTable(TableId.of("dataset", "table1"))).andReturn(null);
+    replay(bigquery);
+    assertNull(dataset.get("table1"));
+  }
+
+  @Test
+  public void testGetWithOptions() throws Exception {
+    BaseTableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), Schema.of()).build();
+    expect(bigquery.getTable(TableId.of("dataset", "table1"), BigQuery.TableOption.fields()))
+        .andReturn(info);
+    replay(bigquery);
+    Table table = dataset.get("table1", BigQuery.TableOption.fields());
+    assertNotNull(table);
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testCreateTable() throws Exception {
+    TableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), Schema.of(FIELD)).build();
+    expect(bigquery.create(info)).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.create("table1", Schema.of(FIELD));
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testCreateTableWithOptions() throws Exception {
+    TableInfo info = TableInfo.builder(TableId.of("dataset", "table1"), Schema.of(FIELD)).build();
+    expect(bigquery.create(info, BigQuery.TableOption.fields())).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.create("table1", Schema.of(FIELD), BigQuery.TableOption.fields());
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testCreateView() throws Exception {
+    ViewInfo info = ViewInfo.builder(TableId.of("dataset", "table2"), "QUERY").build();
+    expect(bigquery.create(info)).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.create("table2", "QUERY");
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testCreateViewWithUserDefinedFunctions() throws Exception {
+    ViewInfo info = ViewInfo.builder(TableId.of("dataset", "table2"), "QUERY", FUNCTIONS).build();
+    expect(bigquery.create(info)).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.create("table2", "QUERY", FUNCTIONS);
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testCreateViewWithOptions() throws Exception {
+    ViewInfo info = ViewInfo.builder(TableId.of("dataset", "table2"), "QUERY").build();
+    expect(bigquery.create(info, BigQuery.TableOption.fields())).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.create("table2", "QUERY", BigQuery.TableOption.fields());
+    assertEquals(info, table.info());
+  }
+
+  @Test
+  public void testCreateExternalTable() throws Exception {
+    ExternalTableInfo info = ExternalTableInfo.builder(TableId.of("dataset", "table3"),
+        ExternalDataConfiguration.of(ImmutableList.of("URI"), Schema.of(), FormatOptions.csv()))
+        .build();
+    expect(bigquery.create(info)).andReturn(info);
+    replay(bigquery);
+    Table table = dataset.create("table3", ExternalDataConfiguration.of(
+        ImmutableList.of("URI"), Schema.of(),
FormatOptions.csv())); + assertEquals(info, table.info()); + } + + @Test + public void testCreateExternalTableWithOptions() throws Exception { + ExternalTableInfo info = ExternalTableInfo.builder(TableId.of("dataset", "table3"), + ExternalDataConfiguration.of(ImmutableList.of("URI"), Schema.of(), FormatOptions.csv())) + .build(); + expect(bigquery.create(info, BigQuery.TableOption.fields())).andReturn(info); + replay(bigquery); + Table table = dataset.create("table3", ExternalDataConfiguration.of( + ImmutableList.of("URI"), Schema.of(), FormatOptions.csv()), BigQuery.TableOption.fields()); + assertEquals(info, table.info()); + } + + @Test + public void testStaticGet() throws Exception { + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(DATASET_INFO); + replay(bigquery); + Dataset loadedDataset = Dataset.get(bigquery, DATASET_INFO.datasetId().dataset()); + assertNotNull(loadedDataset); + assertEquals(DATASET_INFO, loadedDataset.info()); + } + + @Test + public void testStaticGetNull() throws Exception { + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset())).andReturn(null); + replay(bigquery); + assertNull(Dataset.get(bigquery, DATASET_INFO.datasetId().dataset())); + } + + @Test + public void testStaticGetWithOptions() throws Exception { + expect(bigquery.getDataset(DATASET_INFO.datasetId().dataset(), BigQuery.DatasetOption.fields())) + .andReturn(DATASET_INFO); + replay(bigquery); + Dataset loadedDataset = Dataset.get(bigquery, DATASET_INFO.datasetId().dataset(), + BigQuery.DatasetOption.fields()); + assertNotNull(loadedDataset); + assertEquals(DATASET_INFO, loadedDataset.info()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExternalDataConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExternalDataConfigurationTest.java new file mode 100644 index 000000000000..f9b7c31e1071 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExternalDataConfigurationTest.java @@ -0,0 +1,103 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class ExternalDataConfigurationTest {
+
+  private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2");
+  private static final Field FIELD_SCHEMA1 =
+      Field.builder("StringField", Field.Type.string())
+          .mode(Field.Mode.NULLABLE)
+          .description("FieldDescription1")
+          .build();
+  private static final Field FIELD_SCHEMA2 =
+      Field.builder("IntegerField", Field.Type.integer())
+          .mode(Field.Mode.REPEATED)
+          .description("FieldDescription2")
+          .build();
+  private static final Field FIELD_SCHEMA3 =
+      Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2))
+          .mode(Field.Mode.REQUIRED)
+          .description("FieldDescription3")
+          .build();
+  private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3);
+  private static final Integer MAX_BAD_RECORDS = 42;
+  private static final Boolean IGNORE_UNKNOWN_VALUES = true;
+  private static final String COMPRESSION = "GZIP";
+  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build();
+  private static final ExternalDataConfiguration CONFIGURATION = ExternalDataConfiguration
+      .builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS)
+      .compression(COMPRESSION)
+      .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
+      .maxBadRecords(MAX_BAD_RECORDS)
+      .build();
+
+  @Test
+  public void testToBuilder() {
+    compareConfiguration(CONFIGURATION, CONFIGURATION.toBuilder().build());
+    ExternalDataConfiguration configuration = CONFIGURATION.toBuilder().compression("NONE").build();
+    assertEquals("NONE", configuration.compression());
+    configuration = configuration.toBuilder()
+        .compression(COMPRESSION)
+        .build();
+    compareConfiguration(CONFIGURATION, configuration);
+  }
+
+  @Test
+  public void testToBuilderIncomplete() {
+    ExternalDataConfiguration configuration =
+        ExternalDataConfiguration.of(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.json());
+    assertEquals(configuration, configuration.toBuilder().build());
+  }
+
+  @Test
+  public void testBuilder() {
+    assertEquals(COMPRESSION, CONFIGURATION.compression());
+    assertEquals(CSV_OPTIONS, CONFIGURATION.formatOptions());
+    assertEquals(IGNORE_UNKNOWN_VALUES, CONFIGURATION.ignoreUnknownValues());
+    assertEquals(MAX_BAD_RECORDS, CONFIGURATION.maxBadRecords());
+    assertEquals(TABLE_SCHEMA, CONFIGURATION.schema());
+    assertEquals(SOURCE_URIS, CONFIGURATION.sourceUris());
+  }
+
+  @Test
+  public void testToAndFromPb() {
+    compareConfiguration(CONFIGURATION, ExternalDataConfiguration.fromPb(CONFIGURATION.toPb()));
+    ExternalDataConfiguration configuration =
+        ExternalDataConfiguration.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS).build();
+    compareConfiguration(configuration, ExternalDataConfiguration.fromPb(configuration.toPb()));
+  }
+
+  private void compareConfiguration(ExternalDataConfiguration expected,
+      ExternalDataConfiguration value) {
+    assertEquals(expected, value);
+    assertEquals(expected.compression(), value.compression());
+    assertEquals(expected.formatOptions(), value.formatOptions());
+    assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues());
+    assertEquals(expected.maxBadRecords(), value.maxBadRecords());
+    assertEquals(expected.schema(), value.schema());
+    assertEquals(expected.sourceUris(), value.sourceUris());
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExtractJobConfigurationTest.java
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExtractJobConfigurationTest.java
new file mode 100644
index 000000000000..7ac67f41b1f8
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ExtractJobConfigurationTest.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class ExtractJobConfigurationTest {
+
+  private static final List<String> DESTINATION_URIS = ImmutableList.of("uri1", "uri2");
+  private static final String DESTINATION_URI = "uri1";
+  private static final TableId TABLE_ID = TableId.of("dataset", "table");
+  private static final String FIELD_DELIMITER = ",";
+  private static final String FORMAT = "CSV";
+  private static final String JSON_FORMAT = "NEWLINE_DELIMITED_JSON";
+  private static final Boolean PRINT_HEADER = true;
+  private static final String COMPRESSION = "GZIP";
+  private static final ExtractJobConfiguration EXTRACT_CONFIGURATION =
+      ExtractJobConfiguration.builder(TABLE_ID, DESTINATION_URIS)
+          .printHeader(PRINT_HEADER)
+          .fieldDelimiter(FIELD_DELIMITER)
+          .compression(COMPRESSION)
+          .format(FORMAT)
+          .build();
+  private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_ONE_URI =
+      ExtractJobConfiguration.builder(TABLE_ID, DESTINATION_URI)
+          .printHeader(PRINT_HEADER)
+          .fieldDelimiter(FIELD_DELIMITER)
+          .compression(COMPRESSION)
+          .format(FORMAT)
+          .build();
+
+  @Test
+  public void testToBuilder() {
+    compareExtractJobConfiguration(
+        EXTRACT_CONFIGURATION, EXTRACT_CONFIGURATION.toBuilder().build());
+    ExtractJobConfiguration job = EXTRACT_CONFIGURATION.toBuilder()
+        .sourceTable(TableId.of("dataset", "newTable"))
+        .build();
+    assertEquals("newTable", job.sourceTable().table());
+    job = job.toBuilder().sourceTable(TABLE_ID).build();
+    compareExtractJobConfiguration(EXTRACT_CONFIGURATION, job);
+  }
+
+  @Test
+  public void testOf() {
+    ExtractJobConfiguration job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS);
+    assertEquals(TABLE_ID, job.sourceTable());
+    assertEquals(DESTINATION_URIS, job.destinationUris());
+    job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URI);
+    assertEquals(TABLE_ID, job.sourceTable());
+    assertEquals(ImmutableList.of(DESTINATION_URI), job.destinationUris());
+    job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS, JSON_FORMAT);
+    assertEquals(TABLE_ID, job.sourceTable());
+    assertEquals(DESTINATION_URIS, job.destinationUris());
+    assertEquals(JSON_FORMAT, job.format());
+    job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URI, JSON_FORMAT);
+    assertEquals(TABLE_ID, job.sourceTable());
+    assertEquals(ImmutableList.of(DESTINATION_URI), job.destinationUris());
+    assertEquals(JSON_FORMAT,
job.format()); + } + + @Test + public void testToBuilderIncomplete() { + ExtractJobConfiguration job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS); + compareExtractJobConfiguration(job, job.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, EXTRACT_CONFIGURATION.sourceTable()); + assertEquals(DESTINATION_URIS, EXTRACT_CONFIGURATION.destinationUris()); + assertEquals(FIELD_DELIMITER, EXTRACT_CONFIGURATION.fieldDelimiter()); + assertEquals(COMPRESSION, EXTRACT_CONFIGURATION.compression()); + assertEquals(PRINT_HEADER, EXTRACT_CONFIGURATION.printHeader()); + assertEquals(FORMAT, EXTRACT_CONFIGURATION.format()); + assertEquals(TABLE_ID, EXTRACT_CONFIGURATION_ONE_URI.sourceTable()); + assertEquals(ImmutableList.of(DESTINATION_URI), + EXTRACT_CONFIGURATION_ONE_URI.destinationUris()); + assertEquals(FIELD_DELIMITER, EXTRACT_CONFIGURATION_ONE_URI.fieldDelimiter()); + assertEquals(COMPRESSION, EXTRACT_CONFIGURATION_ONE_URI.compression()); + assertEquals(PRINT_HEADER, EXTRACT_CONFIGURATION_ONE_URI.printHeader()); + assertEquals(FORMAT, EXTRACT_CONFIGURATION_ONE_URI.format()); + } + + @Test + public void testToPbAndFromPb() { + assertNotNull(EXTRACT_CONFIGURATION.toPb().getExtract()); + assertNull(EXTRACT_CONFIGURATION.toPb().getCopy()); + assertNull(EXTRACT_CONFIGURATION.toPb().getLoad()); + assertNull(EXTRACT_CONFIGURATION.toPb().getQuery()); + compareExtractJobConfiguration(EXTRACT_CONFIGURATION, + ExtractJobConfiguration.fromPb(EXTRACT_CONFIGURATION.toPb())); + compareExtractJobConfiguration(EXTRACT_CONFIGURATION_ONE_URI, + ExtractJobConfiguration.fromPb(EXTRACT_CONFIGURATION_ONE_URI.toPb())); + ExtractJobConfiguration job = ExtractJobConfiguration.of(TABLE_ID, DESTINATION_URIS); + compareExtractJobConfiguration(job, ExtractJobConfiguration.fromPb(job.toPb())); + } + + @Test + public void testSetProjectId() { + ExtractJobConfiguration configuration = EXTRACT_CONFIGURATION.setProjectId("p"); + assertEquals("p", configuration.sourceTable().project()); + } + + private void compareExtractJobConfiguration(ExtractJobConfiguration expected, + ExtractJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.sourceTable(), value.sourceTable()); + assertEquals(expected.destinationUris(), value.destinationUris()); + assertEquals(expected.compression(), value.compression()); + assertEquals(expected.printHeader(), value.printHeader()); + assertEquals(expected.fieldDelimiter(), value.fieldDelimiter()); + assertEquals(expected.format(), value.format()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldTest.java new file mode 100644 index 000000000000..5f039eaed206 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldTest.java @@ -0,0 +1,106 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +public class FieldTest { + + private static final String FIELD_NAME1 = "StringField"; + private static final String FIELD_NAME2 = "IntegerField"; + private static final String FIELD_NAME3 = "RecordField"; + private static final Field.Type FIELD_TYPE1 = Field.Type.string(); + private static final Field.Type FIELD_TYPE2 = Field.Type.integer(); + private static final Field.Mode FIELD_MODE1 = Field.Mode.NULLABLE; + private static final Field.Mode FIELD_MODE2 = Field.Mode.REPEATED; + private static final Field.Mode FIELD_MODE3 = Field.Mode.REQUIRED; + private static final String FIELD_DESCRIPTION1 = "FieldDescription1"; + private static final String FIELD_DESCRIPTION2 = "FieldDescription2"; + private static final String FIELD_DESCRIPTION3 = "FieldDescription3"; + private static final Field FIELD_SCHEMA1 = Field.builder(FIELD_NAME1, FIELD_TYPE1) + .mode(FIELD_MODE1) + .description(FIELD_DESCRIPTION1) + .build(); + private static final Field FIELD_SCHEMA2 = Field.builder(FIELD_NAME2, FIELD_TYPE2) + .mode(FIELD_MODE2) + .description(FIELD_DESCRIPTION2) + .build(); + private static final Field.Type FIELD_TYPE3 = + Field.Type.record(ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2)); + private static final Field FIELD_SCHEMA3 = Field + .builder(FIELD_NAME3, FIELD_TYPE3) + .mode(FIELD_MODE3) + .description(FIELD_DESCRIPTION3) + .build(); + + @Test + public void testToBuilder() { + compareFieldSchemas(FIELD_SCHEMA1, FIELD_SCHEMA1.toBuilder().build()); + compareFieldSchemas(FIELD_SCHEMA2, FIELD_SCHEMA2.toBuilder().build()); + compareFieldSchemas(FIELD_SCHEMA3, FIELD_SCHEMA3.toBuilder().build()); + Field field = FIELD_SCHEMA1.toBuilder() + .description("New Description") + .build(); + assertEquals("New Description", field.description()); + field = field.toBuilder().description(FIELD_DESCRIPTION1).build(); + compareFieldSchemas(FIELD_SCHEMA1, field); + } + + @Test + public void testToBuilderIncomplete() { + Field field = Field.of(FIELD_NAME1, FIELD_TYPE1); + compareFieldSchemas(field, field.toBuilder().build()); + field = Field.of(FIELD_NAME2, FIELD_TYPE3); + compareFieldSchemas(field, field.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FIELD_NAME1, FIELD_SCHEMA1.name()); + assertEquals(FIELD_TYPE1, FIELD_SCHEMA1.type()); + assertEquals(FIELD_MODE1, FIELD_SCHEMA1.mode()); + assertEquals(FIELD_DESCRIPTION1, FIELD_SCHEMA1.description()); + assertEquals(null, FIELD_SCHEMA1.fields()); + assertEquals(FIELD_NAME3, FIELD_SCHEMA3.name()); + assertEquals(FIELD_TYPE3, FIELD_SCHEMA3.type()); + assertEquals(FIELD_MODE3, FIELD_SCHEMA3.mode()); + assertEquals(FIELD_DESCRIPTION3, FIELD_SCHEMA3.description()); + assertEquals(ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2), FIELD_SCHEMA3.fields()); + } + + @Test + public void testToAndFromPb() { + compareFieldSchemas(FIELD_SCHEMA1, Field.fromPb(FIELD_SCHEMA1.toPb())); + compareFieldSchemas(FIELD_SCHEMA2, Field.fromPb(FIELD_SCHEMA2.toPb())); + compareFieldSchemas(FIELD_SCHEMA3, Field.fromPb(FIELD_SCHEMA3.toPb())); + Field field = Field.builder(FIELD_NAME1, FIELD_TYPE1).build(); + compareFieldSchemas(field, Field.fromPb(field.toPb())); + } + + private void compareFieldSchemas(Field expected, Field value) { + assertEquals(expected, value); + 
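// Compare each accessor as well as equals() so a failure pinpoints the differing attribute.
+ 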
assertEquals(expected.name(), value.name());
+    assertEquals(expected.type(), value.type());
+    assertEquals(expected.mode(), value.mode());
+    assertEquals(expected.description(), value.description());
+    assertEquals(expected.fields(), value.fields());
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java
new file mode 100644
index 000000000000..d6d879dbd58f
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+
+import com.google.api.client.util.Data;
+import com.google.api.services.bigquery.model.TableCell;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+
+import org.junit.Test;
+
+import java.util.Map;
+
+public class FieldValueTest {
+
+  private static final TableCell BOOLEAN_FIELD = new TableCell().setV("false");
+  private static final Map<String, String> INTEGER_FIELD = ImmutableMap.of("v", "1");
+  private static final Map<String, String> FLOAT_FIELD = ImmutableMap.of("v", "1.5");
+  private static final Map<String, String> STRING_FIELD = ImmutableMap.of("v", "string");
+  private static final Map<String, String> TIMESTAMP_FIELD = ImmutableMap.of("v", "42");
+  private static final Map<String, String> NULL_FIELD =
+      ImmutableMap.of("v", Data.nullOf(String.class));
+  private static final Map<String, Object> REPEATED_FIELD =
+      ImmutableMap.<String, Object>of("v", ImmutableList.of(INTEGER_FIELD, INTEGER_FIELD));
+  private static final Map<String, Object> RECORD_FIELD =
+      ImmutableMap.<String, Object>of("f", ImmutableList.of(FLOAT_FIELD, TIMESTAMP_FIELD));
+
+  @Test
+  public void testFromPb() {
+    FieldValue value = FieldValue.fromPb(BOOLEAN_FIELD);
+    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
+    assertFalse(value.booleanValue());
+    value = FieldValue.fromPb(INTEGER_FIELD);
+    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
+    assertEquals(1, value.longValue());
+    value = FieldValue.fromPb(FLOAT_FIELD);
+    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
+    assertEquals(1.5, value.doubleValue(), 0);
+    value = FieldValue.fromPb(STRING_FIELD);
+    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
+    assertEquals("string", value.stringValue());
+    value = FieldValue.fromPb(TIMESTAMP_FIELD);
+    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
+    assertEquals(42000000, value.timestampValue());
+    value = FieldValue.fromPb(NULL_FIELD);
+    assertNull(value.value());
+    value = FieldValue.fromPb(REPEATED_FIELD);
+    assertEquals(FieldValue.Attribute.REPEATED, value.attribute());
+    assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.repeatedValue().get(0));
+    assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.repeatedValue().get(1));
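+    // A record cell is surfaced through repeatedValue() as well, ordered by sub-field position.
+ 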
value = FieldValue.fromPb(RECORD_FIELD); + assertEquals(FieldValue.Attribute.RECORD, value.attribute()); + assertEquals(FieldValue.fromPb(FLOAT_FIELD), value.repeatedValue().get(0)); + assertEquals(FieldValue.fromPb(TIMESTAMP_FIELD), value.repeatedValue().get(1)); + } + + @Test + public void testEquals() { + FieldValue booleanValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "false"); + assertEquals(booleanValue, FieldValue.fromPb(BOOLEAN_FIELD)); + assertEquals(booleanValue.hashCode(), FieldValue.fromPb(BOOLEAN_FIELD).hashCode()); + + FieldValue integerValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "1"); + assertEquals(integerValue, FieldValue.fromPb(INTEGER_FIELD)); + assertEquals(integerValue.hashCode(), FieldValue.fromPb(INTEGER_FIELD).hashCode()); + + FieldValue floatValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "1.5"); + assertEquals(floatValue, FieldValue.fromPb(FLOAT_FIELD)); + assertEquals(floatValue.hashCode(), FieldValue.fromPb(FLOAT_FIELD).hashCode()); + + FieldValue stringValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "string"); + assertEquals(stringValue, FieldValue.fromPb(STRING_FIELD)); + assertEquals(stringValue.hashCode(), FieldValue.fromPb(STRING_FIELD).hashCode()); + + FieldValue timestampValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "42"); + assertEquals(timestampValue, FieldValue.fromPb(TIMESTAMP_FIELD)); + assertEquals(timestampValue.hashCode(), FieldValue.fromPb(TIMESTAMP_FIELD).hashCode()); + + FieldValue nullValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, null); + assertEquals(nullValue, FieldValue.fromPb(NULL_FIELD)); + assertEquals(nullValue.hashCode(), FieldValue.fromPb(NULL_FIELD).hashCode()); + + FieldValue repeatedValue = new FieldValue(FieldValue.Attribute.REPEATED, + ImmutableList.of(integerValue, integerValue)); + assertEquals(repeatedValue, FieldValue.fromPb(REPEATED_FIELD)); + assertEquals(repeatedValue.hashCode(), FieldValue.fromPb(REPEATED_FIELD).hashCode()); + + FieldValue recordValue = new FieldValue(FieldValue.Attribute.RECORD, + ImmutableList.of(floatValue, timestampValue)); + assertEquals(recordValue, FieldValue.fromPb(RECORD_FIELD)); + assertEquals(recordValue.hashCode(), FieldValue.fromPb(RECORD_FIELD).hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java new file mode 100644 index 000000000000..df939143156b --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FormatOptionsTest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class FormatOptionsTest { + + @Test + public void testConstructor() { + FormatOptions options = new FormatOptions(FormatOptions.CSV); + assertEquals(FormatOptions.CSV, options.type()); + options = new FormatOptions(FormatOptions.JSON); + assertEquals(FormatOptions.JSON, options.type()); + options = new FormatOptions(FormatOptions.DATASTORE_BACKUP); + assertEquals(FormatOptions.DATASTORE_BACKUP, options.type()); + } + + @Test + public void testFactoryMethods() { + assertEquals(FormatOptions.CSV, FormatOptions.csv().type()); + assertEquals(FormatOptions.JSON, FormatOptions.json().type()); + assertEquals(FormatOptions.DATASTORE_BACKUP, FormatOptions.datastoreBackup().type()); + } + + @Test + public void testEquals() { + assertEquals(FormatOptions.csv(), FormatOptions.csv()); + assertEquals(FormatOptions.csv().hashCode(), FormatOptions.csv().hashCode()); + assertEquals(FormatOptions.json(), FormatOptions.json()); + assertEquals(FormatOptions.json().hashCode(), FormatOptions.json().hashCode()); + assertEquals(FormatOptions.datastoreBackup(), FormatOptions.datastoreBackup()); + assertEquals(FormatOptions.datastoreBackup().hashCode(), + FormatOptions.datastoreBackup().hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java new file mode 100644 index 000000000000..e083d3682d8c --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java @@ -0,0 +1,923 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.gcloud.bigquery.BigQuery.DatasetField; +import static com.google.gcloud.bigquery.BigQuery.JobField; +import static com.google.gcloud.bigquery.BigQuery.JobListOption; +import static com.google.gcloud.bigquery.BigQuery.JobOption; +import static com.google.gcloud.bigquery.BigQuery.TableField; +import static com.google.gcloud.bigquery.BigQuery.TableOption; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Page; +import com.google.gcloud.bigquery.BigQuery.DatasetOption; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.testing.RemoteGcsHelper; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +public class ITBigQueryTest { + + private static final Logger log = Logger.getLogger(ITBigQueryTest.class.getName()); + private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final String DESCRIPTION = "Test dataset"; + private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final Field TIMESTAMP_FIELD_SCHEMA = + Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .description("TimestampDescription") + .build(); + private static final Field STRING_FIELD_SCHEMA = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("StringDescription") + .build(); + private static final Field INTEGER_FIELD_SCHEMA = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("IntegerDescription") + .build(); + private static final Field BOOLEAN_FIELD_SCHEMA = + Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .description("BooleanDescription") + .build(); + private static final Field RECORD_FIELD_SCHEMA = + Field.builder("RecordField", Field.Type.record(TIMESTAMP_FIELD_SCHEMA, + STRING_FIELD_SCHEMA, INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA)) + .mode(Field.Mode.REQUIRED) + .description("RecordDescription") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(TIMESTAMP_FIELD_SCHEMA, STRING_FIELD_SCHEMA, + INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA, RECORD_FIELD_SCHEMA); + private static final Schema SIMPLE_SCHEMA = Schema.of(STRING_FIELD_SCHEMA); + private static final Schema QUERY_RESULT_SCHEMA = Schema.builder() + .addField(Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField(Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .build()) + 
.addField(Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .build()) + .build(); + private static final String LOAD_FILE = "load.csv"; + private static final String JSON_LOAD_FILE = "load.json"; + private static final String EXTRACT_FILE = "extract.csv"; + private static final String BUCKET = RemoteGcsHelper.generateBucketName(); + private static final TableId TABLE_ID = TableId.of(DATASET, "testing_table"); + private static final String CSV_CONTENT = "StringValue1\nStringValue2\n"; + private static final String JSON_CONTENT = "{" + + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," + + "\"StringField\": \"stringValue\"," + + "\"IntegerField\": [\"0\", \"1\"]," + + "\"BooleanField\": \"false\"," + + "\"RecordField\": {" + + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + "\"StringField\": null," + + "\"IntegerField\": [\"1\",\"0\"]," + + "\"BooleanField\": \"true\"" + + "}" + + "}\n" + + "{" + + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," + + "\"StringField\": \"stringValue\"," + + "\"IntegerField\": [\"0\", \"1\"]," + + "\"BooleanField\": \"false\"," + + "\"RecordField\": {" + + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + "\"StringField\": null," + + "\"IntegerField\": [\"1\",\"0\"]," + + "\"BooleanField\": \"true\"" + + "}" + + "}"; + + private static BigQuery bigquery; + private static Storage storage; + + @Rule + public Timeout globalTimeout = Timeout.seconds(300); + + @BeforeClass + public static void beforeClass() throws IOException, InterruptedException { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); + bigquery = bigqueryHelper.options().service(); + storage = gcsHelper.options().service(); + storage.create(BucketInfo.of(BUCKET)); + storage.create(BlobInfo.builder(BUCKET, LOAD_FILE).contentType("text/plain").build(), + CSV_CONTENT.getBytes(StandardCharsets.UTF_8)); + storage.create(BlobInfo.builder(BUCKET, JSON_LOAD_FILE).contentType("application/json").build(), + JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); + DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build(); + bigquery.create(info); + LoadJobConfiguration configuration = LoadJobConfiguration.builder( + TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .schema(TABLE_SCHEMA) + .build(); + JobInfo job = bigquery.create(JobInfo.of(configuration)); + while (job.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + job = bigquery.getJob(job.jobId()); + } + assertNull(job.status().error()); + } + + @AfterClass + public static void afterClass() throws ExecutionException, InterruptedException { + if (bigquery != null) { + RemoteBigQueryHelper.forceDelete(bigquery, DATASET); + } + if (storage != null && !RemoteGcsHelper.forceDelete(storage, BUCKET, 10, TimeUnit.SECONDS)) { + if (log.isLoggable(Level.WARNING)) { + log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); + } + } + } + + @Test + public void testGetDataset() { + DatasetInfo dataset = bigquery.getDataset(DATASET); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(DATASET, dataset.datasetId().dataset()); + assertEquals(DESCRIPTION, dataset.description()); + assertNotNull(dataset.acl()); + assertNotNull(dataset.etag()); + assertNotNull(dataset.id()); + assertNotNull(dataset.lastModified()); + 
assertNotNull(dataset.selfLink()); + } + + @Test + public void testGetDatasetWithSelectedFields() { + DatasetInfo dataset = bigquery.getDataset(DATASET, + DatasetOption.fields(DatasetField.CREATION_TIME)); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(DATASET, dataset.datasetId().dataset()); + assertNotNull(dataset.creationTime()); + assertNull(dataset.description()); + assertNull(dataset.defaultTableLifetime()); + assertNull(dataset.acl()); + assertNull(dataset.etag()); + assertNull(dataset.friendlyName()); + assertNull(dataset.id()); + assertNull(dataset.lastModified()); + assertNull(dataset.location()); + assertNull(dataset.selfLink()); + } + + @Test + public void testUpdateDataset() { + DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + .description("Some Description") + .build()); + assertNotNull(dataset); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); + assertEquals("Some Description", dataset.description()); + DatasetInfo updatedDataset = + bigquery.update(dataset.toBuilder().description("Updated Description").build()); + assertEquals("Updated Description", updatedDataset.description()); + assertTrue(bigquery.delete(OTHER_DATASET)); + } + + @Test + public void testUpdateDatasetWithSelectedFields() { + DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + .description("Some Description") + .build()); + assertNotNull(dataset); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); + assertEquals("Some Description", dataset.description()); + DatasetInfo updatedDataset = + bigquery.update(dataset.toBuilder().description("Updated Description").build(), + DatasetOption.fields(DatasetField.DESCRIPTION)); + assertEquals("Updated Description", updatedDataset.description()); + assertNull(updatedDataset.creationTime()); + assertNull(updatedDataset.defaultTableLifetime()); + assertNull(updatedDataset.acl()); + assertNull(updatedDataset.etag()); + assertNull(updatedDataset.friendlyName()); + assertNull(updatedDataset.id()); + assertNull(updatedDataset.lastModified()); + assertNull(updatedDataset.location()); + assertNull(updatedDataset.selfLink()); + assertTrue(bigquery.delete(OTHER_DATASET)); + } + + @Test + public void testGetNonExistingTable() { + assertNull(bigquery.getTable(DATASET, "test_get_non_existing_table")); + } + + @Test + public void testCreateAndGetTable() { + String tableName = "test_create_and_get_table"; + TableId tableId = TableId.of(DATASET, tableName); + BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, TABLE_SCHEMA)); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTableInfo); + assertTrue(remoteTableInfo instanceof TableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertEquals(BaseTableInfo.Type.TABLE, remoteTableInfo.type()); + assertEquals(TABLE_SCHEMA, remoteTableInfo.schema()); + assertNotNull(remoteTableInfo.creationTime()); + assertNotNull(remoteTableInfo.lastModifiedTime()); + assertNotNull(remoteTableInfo.numBytes()); + assertNotNull(remoteTableInfo.numRows()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void 
testCreateAndGetTableWithSelectedField() { + String tableName = "test_create_and_get_selected_fields_table"; + TableId tableId = TableId.of(DATASET, tableName); + BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, TABLE_SCHEMA)); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName, + TableOption.fields(TableField.CREATION_TIME)); + assertNotNull(remoteTableInfo); + assertTrue(remoteTableInfo instanceof TableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertEquals(BaseTableInfo.Type.TABLE, remoteTableInfo.type()); + assertNotNull(remoteTableInfo.creationTime()); + assertNull(remoteTableInfo.schema()); + assertNull(remoteTableInfo.lastModifiedTime()); + assertNull(remoteTableInfo.numBytes()); + assertNull(remoteTableInfo.numRows()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCreateExternalTable() throws InterruptedException { + String tableName = "test_create_external_table"; + TableId tableId = TableId.of(DATASET, tableName); + ExternalDataConfiguration externalDataConfiguration = ExternalDataConfiguration.of( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); + BaseTableInfo tableInfo = ExternalTableInfo.of(tableId, externalDataConfiguration); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTableInfo); + assertTrue(remoteTableInfo instanceof ExternalTableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertEquals(TABLE_SCHEMA, remoteTableInfo.schema()); + QueryRequest request = QueryRequest.builder( + "SELECT TimestampField, StringField, IntegerField, BooleanField FROM " + DATASET + "." 
+ + tableName) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobCompleted()) { + response = bigquery.getQueryResults(response.jobId()); + Thread.sleep(1000); + } + long integerValue = 0; + int rowCount = 0; + for (List<FieldValue> row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(integerValue, integerCell.longValue()); + assertEquals(false, booleanCell.booleanValue()); + integerValue = ~integerValue & 0x1; + rowCount++; + } + assertEquals(4, rowCount); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCreateViewTable() throws InterruptedException { + String tableName = "test_create_view_table"; + TableId tableId = TableId.of(DATASET, tableName); + BaseTableInfo tableInfo = ViewInfo.of(tableId, + "SELECT TimestampField, StringField, BooleanField FROM " + DATASET + "." + + TABLE_ID.table()); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertTrue(remoteTableInfo instanceof ViewInfo); + Schema expectedSchema = Schema.builder() + .addField( + Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField( + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField( + Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .build()) + .build(); + assertEquals(expectedSchema, remoteTableInfo.schema()); + QueryRequest request = QueryRequest.builder("SELECT * FROM " + tableName) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobCompleted()) { + response = bigquery.getQueryResults(response.jobId()); + Thread.sleep(1000); + } + int rowCount = 0; + for (List<FieldValue> row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testListTables() { + String tableName = "test_list_tables"; + BaseTableInfo tableInfo = 
TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + Page<BaseTableInfo> tables = bigquery.listTables(DATASET); + boolean found = false; + Iterator<BaseTableInfo> tableIterator = tables.values().iterator(); + while (tableIterator.hasNext() && !found) { + if (tableIterator.next().tableId().equals(createdTableInfo.tableId())) { + found = true; + } + } + assertTrue(found); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testUpdateTable() { + String tableName = "test_update_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder() + .description("newDescription").build()); + assertEquals(DATASET, updatedTableInfo.tableId().dataset()); + assertEquals(tableName, updatedTableInfo.tableId().table()); + assertEquals(TABLE_SCHEMA, updatedTableInfo.schema()); + assertEquals("newDescription", updatedTableInfo.description()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testUpdateTableWithSelectedFields() { + String tableName = "test_update_with_selected_fields_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder().description("newDescr") + .build(), TableOption.fields(TableField.DESCRIPTION)); + assertTrue(updatedTableInfo instanceof TableInfo); + assertEquals(DATASET, updatedTableInfo.tableId().dataset()); + assertEquals(tableName, updatedTableInfo.tableId().table()); + assertEquals("newDescr", updatedTableInfo.description()); + assertNull(updatedTableInfo.schema()); + assertNull(updatedTableInfo.lastModifiedTime()); + assertNull(updatedTableInfo.numBytes()); + assertNull(updatedTableInfo.numRows()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testUpdateNonExistingTable() { + TableInfo tableInfo = + TableInfo.of(TableId.of(DATASET, "test_update_non_existing_table"), SIMPLE_SCHEMA); + try { + bigquery.update(tableInfo); + fail("BigQueryException was expected"); + } catch (BigQueryException e) { + BigQueryError error = e.error(); + assertNotNull(error); + assertEquals("notFound", error.reason()); + assertNotNull(error.message()); + } + } + + @Test + public void testDeleteNonExistingTable() { + assertFalse(bigquery.delete(DATASET, "test_delete_non_existing_table")); + } + + @Test + public void testInsertAll() { + String tableName = "test_insert_all_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", 
"1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.insertErrors().size()); + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + } + + @Test + public void testInsertAllWithSuffix() throws InterruptedException { + String tableName = "test_insert_all_with_suffix_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .templateSuffix("_suffix") + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.insertErrors().size()); + String newTableName = tableName + "_suffix"; + BaseTableInfo suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); + // wait until the new table is created. If the table is never created the test will time-out + while (suffixTable == null) { + Thread.sleep(1000L); + suffixTable = bigquery.getTable(DATASET, newTableName, TableOption.fields()); + } + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + assertTrue(bigquery.delete(TableId.of(DATASET, newTableName))); + } + + @Test + public void testInsertAllWithErrors() { + String tableName = "test_insert_all_with_errors_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "invalidDate", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false)) + .skipInvalidRows(true) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertTrue(response.hasErrors()); + assertEquals(2, response.insertErrors().size()); + assertNotNull(response.errorsFor(1L)); + assertNotNull(response.errorsFor(2L)); + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + } + + @Test + public void testListAllTableData() { + Page> rows = 
bigquery.listTableData(TABLE_ID); + int rowCount = 0; + for (List<FieldValue> row : rows.values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + FieldValue recordCell = row.get(4); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(0, integerCell.repeatedValue().get(0).longValue()); + assertEquals(1, integerCell.repeatedValue().get(1).longValue()); + assertEquals(false, booleanCell.booleanValue()); + assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue()); + assertTrue(recordCell.recordValue().get(1).isNull()); + assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue()); + assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue()); + assertEquals(true, recordCell.recordValue().get(3).booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + } + + @Test + public void testQuery() throws InterruptedException { + String query = new StringBuilder() + .append("SELECT TimestampField, StringField, BooleanField FROM ") + .append(TABLE_ID.table()) + .toString(); + QueryRequest request = QueryRequest.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobCompleted()) { + Thread.sleep(1000); + response = bigquery.getQueryResults(response.jobId()); + } + assertEquals(QUERY_RESULT_SCHEMA, response.result().schema()); + int rowCount = 0; + for (List<FieldValue> row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + JobInfo queryJob = bigquery.getJob(response.jobId()); + JobStatistics.QueryStatistics statistics = queryJob.statistics(); + assertNotNull(statistics.queryPlan()); + } + + @Test + public void testListJobs() { + Page<JobInfo> jobs = bigquery.listJobs(); + for (JobInfo job : jobs.values()) { + assertNotNull(job.jobId()); + assertNotNull(job.statistics()); + assertNotNull(job.status()); + assertNotNull(job.userEmail()); + assertNotNull(job.id()); + } + } + + @Test + public void testListJobsWithSelectedFields() { + Page<JobInfo> jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); + for (JobInfo job : jobs.values()) { + assertNotNull(job.jobId()); + assertNotNull(job.status()); + assertNotNull(job.userEmail()); + assertNull(job.statistics()); + assertNull(job.id()); + } + } + + @Test + public void testCreateAndGetJob() throws InterruptedException { + String sourceTableName = "test_create_and_get_job_source_table"; + String 
destinationTableName = "test_create_and_get_job_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(sourceTableName, createdTableInfo.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobConfiguration copyJobConfiguration = + CopyJobConfiguration.of(destinationTable, sourceTable); + JobInfo job = JobInfo.of(copyJobConfiguration); + JobInfo createdJob = bigquery.create(job); + JobInfo remoteJob = bigquery.getJob(createdJob.jobId()); + assertEquals(createdJob.jobId(), remoteJob.jobId()); + CopyJobConfiguration createdConfiguration = createdJob.configuration(); + CopyJobConfiguration remoteConfiguration = remoteJob.configuration(); + assertEquals(createdConfiguration.sourceTables(), remoteConfiguration.sourceTables()); + assertEquals(createdConfiguration.destinationTable(), remoteConfiguration.destinationTable()); + assertEquals(createdConfiguration.createDisposition(), remoteConfiguration.createDisposition()); + assertEquals(createdConfiguration.writeDisposition(), remoteConfiguration.writeDisposition()); + assertNotNull(remoteJob.etag()); + assertNotNull(remoteJob.statistics()); + assertNotNull(remoteJob.status()); + assertEquals(createdJob.selfLink(), remoteJob.selfLink()); + assertEquals(createdJob.userEmail(), remoteJob.userEmail()); + assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testCreateAndGetJobWithSelectedFields() throws InterruptedException { + String sourceTableName = "test_create_and_get_job_with_selected_fields_source_table"; + String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(sourceTableName, createdTableInfo.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); + JobInfo createdJob = + bigquery.create(JobInfo.of(configuration), JobOption.fields(JobField.ETAG)); + CopyJobConfiguration createdConfiguration = createdJob.configuration(); + assertNotNull(createdJob.jobId()); + assertNotNull(createdConfiguration.sourceTables()); + assertNotNull(createdConfiguration.destinationTable()); + assertNotNull(createdJob.etag()); + assertNull(createdJob.statistics()); + assertNull(createdJob.status()); + assertNull(createdJob.selfLink()); + assertNull(createdJob.userEmail()); + JobInfo remoteJob = bigquery.getJob(createdJob.jobId(), JobOption.fields(JobField.ETAG)); + CopyJobConfiguration remoteConfiguration = remoteJob.configuration(); + assertEquals(createdJob.jobId(), remoteJob.jobId()); + assertEquals(createdConfiguration.sourceTables(), remoteConfiguration.sourceTables()); + assertEquals(createdConfiguration.destinationTable(), remoteConfiguration.destinationTable()); + assertEquals(createdConfiguration.createDisposition(), remoteConfiguration.createDisposition()); + 
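The selected-fields tests above and below exercise BigQuery partial responses: only the fields named in `JobOption.fields(...)` are populated on the returned `JobInfo`, and everything else comes back `null`. A minimal sketch of the pattern, with a placeholder job ID:

```java
// Fetch only the job's etag to keep the response payload small;
// "some_job" is a placeholder job ID.
JobInfo job = bigquery.getJob(JobId.of("some_job"), JobOption.fields(JobField.ETAG));
String etag = job.etag(); // requested, so populated
JobStatistics stats = job.statistics(); // not requested, so null
```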
assertEquals(createdConfiguration.writeDisposition(), remoteConfiguration.writeDisposition()); + assertNotNull(remoteJob.etag()); + assertNull(remoteJob.statistics()); + assertNull(remoteJob.status()); + assertNull(remoteJob.selfLink()); + assertNull(remoteJob.userEmail()); + assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testCopyJob() throws InterruptedException { + String sourceTableName = "test_copy_job_source_table"; + String destinationTableName = "test_copy_job_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(sourceTableName, createdTableInfo.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); + JobInfo remoteJob = bigquery.create(JobInfo.of(configuration)); + while (remoteJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteJob = bigquery.getJob(remoteJob.jobId()); + } + assertNull(remoteJob.status().error()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, destinationTableName); + assertNotNull(remoteTableInfo); + assertEquals(destinationTable.dataset(), remoteTableInfo.tableId().dataset()); + assertEquals(destinationTableName, remoteTableInfo.tableId().table()); + assertEquals(SIMPLE_SCHEMA, remoteTableInfo.schema()); + assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testQueryJob() throws InterruptedException { + String tableName = "test_query_job_table"; + String query = new StringBuilder() + .append("SELECT TimestampField, StringField, BooleanField FROM ") + .append(TABLE_ID.table()) + .toString(); + TableId destinationTable = TableId.of(DATASET, tableName); + QueryJobConfiguration configuration = QueryJobConfiguration.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(destinationTable) + .build(); + JobInfo remoteJob = bigquery.create(JobInfo.of(configuration)); + while (remoteJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteJob = bigquery.getJob(remoteJob.jobId()); + } + assertNull(remoteJob.status().error()); + + QueryResponse response = bigquery.getQueryResults(remoteJob.jobId()); + while (!response.jobCompleted()) { + Thread.sleep(1000); + response = bigquery.getQueryResults(response.jobId()); + } + assertFalse(response.hasErrors()); + assertEquals(QUERY_RESULT_SCHEMA, response.result().schema()); + int rowCount = 0; + for (List<FieldValue> row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, tableName)); + JobInfo queryJob = 
bigquery.getJob(remoteJob.jobId()); + JobStatistics.QueryStatistics statistics = queryJob.statistics(); + assertNotNull(statistics.queryPlan()); + } + + @Test + public void testExtractJob() throws InterruptedException { + String tableName = "test_export_job_table"; + TableId destinationTable = TableId.of(DATASET, tableName); + LoadJobConfiguration configuration = + LoadJobConfiguration.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE) + .schema(SIMPLE_SCHEMA) + .build(); + JobInfo remoteLoadJob = + bigquery.create(JobInfo.of(configuration)); + while (remoteLoadJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteLoadJob = bigquery.getJob(remoteLoadJob.jobId()); + } + assertNull(remoteLoadJob.status().error()); + + ExtractJobConfiguration extractConfiguration = + ExtractJobConfiguration.builder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE) + .printHeader(false) + .build(); + JobInfo extractJob = JobInfo.of(extractConfiguration); + JobInfo remoteExtractJob = bigquery.create(extractJob); + while (remoteExtractJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteExtractJob = bigquery.getJob(remoteExtractJob.jobId()); + } + assertNull(remoteExtractJob.status().error()); + assertEquals(CSV_CONTENT, + new String(storage.readAllBytes(BUCKET, EXTRACT_FILE), StandardCharsets.UTF_8)); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCancelJob() throws InterruptedException { + String destinationTableName = "test_cancel_query_job_table"; + String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.table(); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + QueryJobConfiguration configuration = QueryJobConfiguration.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(destinationTable) + .build(); + JobInfo remoteJob = bigquery.create(JobInfo.of(configuration)); + assertTrue(bigquery.cancel(remoteJob.jobId())); + while (remoteJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteJob = bigquery.getJob(remoteJob.jobId()); + } + assertNull(remoteJob.status().error()); + } + + @Test + public void testCancelNonExistingJob() throws InterruptedException { + assertFalse(bigquery.cancel("test_cancel_non_existing_job")); + } + + @Test + public void testInsertFromFile() throws InterruptedException, FileNotFoundException { + String destinationTableName = "test_insert_from_file_table"; + TableId tableId = TableId.of(DATASET, destinationTableName); + WriteChannelConfiguration configuration = WriteChannelConfiguration.builder(tableId) + .formatOptions(FormatOptions.json()) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .schema(TABLE_SCHEMA) + .build(); + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { + channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); + } catch (IOException e) { + fail("IOException was not expected"); + } + // wait until the new table is created. 
If the table is never created the test will time-out + while (bigquery.getTable(tableId) == null) { + Thread.sleep(1000L); + } + Page<List<FieldValue>> rows = bigquery.listTableData(tableId); + int rowCount = 0; + for (List<FieldValue> row : rows.values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + FieldValue recordCell = row.get(4); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(0, integerCell.repeatedValue().get(0).longValue()); + assertEquals(1, integerCell.repeatedValue().get(1).longValue()); + assertEquals(false, booleanCell.booleanValue()); + assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue()); + assertTrue(recordCell.recordValue().get(1).isNull()); + assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue()); + assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue()); + assertEquals(true, recordCell.recordValue().get(3).booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java new file mode 100644 index 000000000000..d2e1de14a571 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java @@ -0,0 +1,222 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class InsertAllRequestTest { + + private static final Map<String, Object> CONTENT1 = + ImmutableMap.of("key", "val1"); + private static final Map<String, Object> CONTENT2 = + ImmutableMap.of("key", "val2"); + private static final List<InsertAllRequest.RowToInsert> ROWS = + ImmutableList.of(InsertAllRequest.RowToInsert.of(CONTENT1), + InsertAllRequest.RowToInsert.of(CONTENT2)); + private static final List<InsertAllRequest.RowToInsert> ROWS_WITH_ID = + ImmutableList.of(InsertAllRequest.RowToInsert.of("id1", CONTENT1), + InsertAllRequest.RowToInsert.of("id2", CONTENT2)); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final Schema TABLE_SCHEMA = Schema.of(); + private static final BaseTableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_SCHEMA); + private static final boolean SKIP_INVALID_ROWS = true; + private static final boolean IGNORE_UNKNOWN_VALUES = false; + private static final String TEMPLATE_SUFFIX = "templateSuffix"; + private static final InsertAllRequest INSERT_ALL_REQUEST1 = InsertAllRequest.builder(TABLE_ID) + .addRow(CONTENT1) + .addRow(CONTENT2) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST2 = InsertAllRequest.builder(TABLE_ID) + .rows(ROWS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST3 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table()) + .rows(ROWS_WITH_ID) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST4 = + InsertAllRequest.builder(TABLE_ID, ROWS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST5 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table(), ROWS_WITH_ID) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST6 = + InsertAllRequest.builder(TABLE_ID, ROWS.get(0), ROWS.get(1)) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST7 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table(), ROWS_WITH_ID.get(0), + ROWS_WITH_ID.get(1)) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST8 = + InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table()) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST9 = InsertAllRequest.builder(TABLE_INFO) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .skipInvalidRows(SKIP_INVALID_ROWS) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST10 = InsertAllRequest.builder(TABLE_INFO) + .addRow("id1", 
CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .build(); + private static final InsertAllRequest INSERT_ALL_REQUEST11 = InsertAllRequest.builder(TABLE_INFO) + .addRow("id1", CONTENT1) + .addRow("id2", CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .templateSuffix(TEMPLATE_SUFFIX) + .build(); + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, INSERT_ALL_REQUEST1.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST2.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST3.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST4.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST5.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST6.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST7.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST8.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST9.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST10.table()); + assertEquals(TABLE_ID, INSERT_ALL_REQUEST11.table()); + assertEquals(ROWS, INSERT_ALL_REQUEST1.rows()); + assertEquals(ROWS, INSERT_ALL_REQUEST2.rows()); + assertEquals(ROWS, INSERT_ALL_REQUEST4.rows()); + assertEquals(ROWS, INSERT_ALL_REQUEST6.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST3.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST5.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST7.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST8.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST9.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST10.rows()); + assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST11.rows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST1.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST2.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST3.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST4.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST5.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST6.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST7.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST8.skipInvalidRows()); + assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST9.skipInvalidRows()); + assertFalse(INSERT_ALL_REQUEST10.skipInvalidRows()); + assertFalse(INSERT_ALL_REQUEST11.skipInvalidRows()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST1.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST2.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST3.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST4.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST5.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST6.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST7.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST8.ignoreUnknownValues()); + assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST9.ignoreUnknownValues()); + assertTrue(INSERT_ALL_REQUEST10.ignoreUnknownValues()); + assertTrue(INSERT_ALL_REQUEST11.ignoreUnknownValues()); + assertNull(INSERT_ALL_REQUEST1.templateSuffix()); + assertNull(INSERT_ALL_REQUEST2.templateSuffix()); + assertNull(INSERT_ALL_REQUEST3.templateSuffix()); + assertNull(INSERT_ALL_REQUEST4.templateSuffix()); + assertNull(INSERT_ALL_REQUEST5.templateSuffix()); + 
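Every `INSERT_ALL_REQUEST*` constant above describes the same streaming insert built through a different overload, which is what `testBuilder` pins down. A minimal sketch of building and sending such a request (the table coordinates and row content are placeholders, and `bigquery` is assumed to be an initialized service handle):

```java
// Build a streaming-insert request, then inspect per-row errors if any.
InsertAllRequest request = InsertAllRequest.builder(TableId.of("dataset", "table"))
    .addRow(ImmutableMap.<String, Object>of("key", "val1"))
    .skipInvalidRows(true) // drop malformed rows instead of failing the whole batch
    .ignoreUnknownValues(false) // reject rows whose fields are not in the schema
    .build();
InsertAllResponse response = bigquery.insertAll(request);
if (response.hasErrors()) {
  System.out.println(response.insertErrors()); // errors keyed by row index
}
```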
assertNull(INSERT_ALL_REQUEST6.templateSuffix()); + assertNull(INSERT_ALL_REQUEST7.templateSuffix()); + assertNull(INSERT_ALL_REQUEST8.templateSuffix()); + assertNull(INSERT_ALL_REQUEST9.templateSuffix()); + assertNull(INSERT_ALL_REQUEST10.templateSuffix()); + assertEquals(TEMPLATE_SUFFIX, INSERT_ALL_REQUEST11.templateSuffix()); + } + + @Test + public void testOf() { + InsertAllRequest request = InsertAllRequest.of(TABLE_ID, ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_INFO, ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID, ROWS.get(0), ROWS.get(1)); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_INFO, ROWS.get(0), ROWS.get(1)); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS.get(0), ROWS.get(1)); + assertEquals(TABLE_ID, request.table()); + assertEquals(ROWS, request.rows()); + } + + @Test + public void testEquals() { + compareInsertAllRequest(INSERT_ALL_REQUEST1, INSERT_ALL_REQUEST2); + compareInsertAllRequest(INSERT_ALL_REQUEST2, INSERT_ALL_REQUEST4); + compareInsertAllRequest(INSERT_ALL_REQUEST3, INSERT_ALL_REQUEST5); + compareInsertAllRequest(INSERT_ALL_REQUEST4, INSERT_ALL_REQUEST6); + compareInsertAllRequest(INSERT_ALL_REQUEST5, INSERT_ALL_REQUEST7); + compareInsertAllRequest(INSERT_ALL_REQUEST7, INSERT_ALL_REQUEST8); + compareInsertAllRequest(INSERT_ALL_REQUEST8, INSERT_ALL_REQUEST9); + compareInsertAllRequest(INSERT_ALL_REQUEST10, INSERT_ALL_REQUEST10); + compareInsertAllRequest(INSERT_ALL_REQUEST11, INSERT_ALL_REQUEST11); + } + + private void compareInsertAllRequest(InsertAllRequest expected, InsertAllRequest value) { + assertEquals(expected, value); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.table(), value.table()); + assertEquals(expected.rows(), value.rows()); + assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); + assertEquals(expected.skipInvalidRows(), value.skipInvalidRows()); + assertEquals(expected.templateSuffix(), value.templateSuffix()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java new file mode 100644 index 000000000000..b2eb0458f27f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllResponseTest.java @@ -0,0 +1,77 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class InsertAllResponseTest { + + private static final List<BigQueryError> ERRORS1 = ImmutableList.of( + new BigQueryError("reason1", "location1", "message1"), + new BigQueryError("reason2", "location2", "message2")); + private static final List<BigQueryError> ERRORS2 = ImmutableList.of( + new BigQueryError("reason3", "location3", "message3"), + new BigQueryError("reason4", "location4", "message4")); + private static final Map<Long, List<BigQueryError>> ERRORS_MAP = ImmutableMap.of( + 0L, ERRORS1, 1L, ERRORS2); + private static final InsertAllResponse INSERT_ALL_RESPONSE = new InsertAllResponse(ERRORS_MAP); + private static final InsertAllResponse EMPTY_INSERT_ALL_RESPONSE = new InsertAllResponse(null); + + @Test + public void testConstructor() { + assertEquals(INSERT_ALL_RESPONSE, INSERT_ALL_RESPONSE); + } + + @Test + public void testErrorsFor() { + assertEquals(ERRORS1, INSERT_ALL_RESPONSE.errorsFor(0L)); + assertEquals(ERRORS2, INSERT_ALL_RESPONSE.errorsFor(1L)); + assertNull(INSERT_ALL_RESPONSE.errorsFor(2L)); + } + + @Test + public void testHasErrors() { + assertTrue(INSERT_ALL_RESPONSE.hasErrors()); + assertFalse(EMPTY_INSERT_ALL_RESPONSE.hasErrors()); + } + + @Test + public void testToPbAndFromPb() { + compareInsertAllResponse(INSERT_ALL_RESPONSE, + InsertAllResponse.fromPb(INSERT_ALL_RESPONSE.toPb())); + compareInsertAllResponse(EMPTY_INSERT_ALL_RESPONSE, + InsertAllResponse.fromPb(EMPTY_INSERT_ALL_RESPONSE.toPb())); + } + + private void compareInsertAllResponse(InsertAllResponse expected, InsertAllResponse value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.insertErrors(), value.insertErrors()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java new file mode 100644 index 000000000000..740830f07544 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobIdTest.java @@ -0,0 +1,56 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class JobIdTest { + + private static final JobId JOB = JobId.of("job"); + private static final JobId JOB_COMPLETE = JobId.of("project", "job"); + + @Test + public void testOf() { + assertEquals(null, JOB.project()); + assertEquals("job", JOB.job()); + assertEquals("project", JOB_COMPLETE.project()); + assertEquals("job", JOB_COMPLETE.job()); + } + + @Test + public void testEquals() { + compareJobs(JOB, JobId.of("job")); + compareJobs(JOB_COMPLETE, JobId.of("project", "job")); + } + + @Test + public void testToPbAndFromPb() { + compareJobs(JOB, JobId.fromPb(JOB.toPb())); + compareJobs(JOB_COMPLETE, JobId.fromPb(JOB_COMPLETE.toPb())); + } + + private void compareJobs(JobId expected, JobId value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.project(), value.project()); + assertEquals(expected.job(), value.job()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java new file mode 100644 index 000000000000..96bf8d1838c4 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobInfoTest.java @@ -0,0 +1,370 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; +import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics; +import com.google.gcloud.bigquery.JobStatistics.LoadStatistics; +import com.google.gcloud.bigquery.JobStatistics.QueryStatistics; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class JobInfoTest { + + private static final String ETAG = "etag"; + private static final String ID = "id"; + private static final String SELF_LINK = "selfLink"; + private static final String EMAIL = "email"; + private static final JobId JOB_ID = JobId.of("job"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE); + private static final JobStatistics COPY_JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final ExtractStatistics EXTRACT_JOB_STATISTICS = + ExtractStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .destinationUriFileCounts(ImmutableList.of(42L)) + .build(); + private static final LoadStatistics LOAD_JOB_STATISTICS = + LoadStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .inputFiles(42L) + .outputBytes(1024L) + .inputBytes(2048L) + .outputRows(24L) + .build(); + private static final QueryStatistics QUERY_JOB_STATISTICS = + QueryStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .totalBytesProcessed(2048L) + .totalBytesBilled(1024L) + .cacheHit(false) + .billingTier(42) + .build(); + private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable"); + private static final TableId DESTINATION_TABLE = TableId.of("dataset", "destinationTable"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final CopyJobConfiguration COPY_CONFIGURATION = + CopyJobConfiguration.builder(DESTINATION_TABLE, SOURCE_TABLE) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .build(); + private static final List<String> DESTINATION_URIS = ImmutableList.of("uri1", "uri2"); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final String FIELD_DELIMITER = ","; + private static final String FORMAT = 
"CSV"; + private static final Boolean PRINT_HEADER = true; + private static final String COMPRESSION = "GZIP"; + private static final ExtractJobConfiguration EXTRACT_CONFIGURATION = + ExtractJobConfiguration.builder(TABLE_ID, DESTINATION_URIS) + .printHeader(PRINT_HEADER) + .fieldDelimiter(FIELD_DELIMITER) + .compression(COMPRESSION) + .format(FORMAT) + .build(); + private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build(); + private static final ExternalDataConfiguration TABLE_CONFIGURATION = ExternalDataConfiguration + .builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + private static final LoadJobConfiguration LOAD_CONFIGURATION = + LoadJobConfiguration.builder(TABLE_ID, SOURCE_URIS) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .projectionFields(PROJECTION_FIELDS) + .schema(TABLE_SCHEMA) + .build(); + private static final String QUERY = "BigQuery SQL"; + private static final Map TABLE_DEFINITIONS = + ImmutableMap.of("tableName", TABLE_CONFIGURATION); + private static final QueryJobConfiguration.Priority PRIORITY = + QueryJobConfiguration.Priority.BATCH; + private static final boolean ALLOW_LARGE_RESULTS = true; + private static final boolean USE_QUERY_CACHE = false; + private static final boolean FLATTEN_RESULTS = true; + private static final List USER_DEFINED_FUNCTIONS = ImmutableList.of( + UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final QueryJobConfiguration QUERY_CONFIGURATION = + QueryJobConfiguration.builder(QUERY) + .useQueryCache(USE_QUERY_CACHE) + .tableDefinitions(TABLE_DEFINITIONS) + .allowLargeResults(ALLOW_LARGE_RESULTS) + .createDisposition(CREATE_DISPOSITION) + .defaultDataset(DATASET_ID) + .destinationTable(TABLE_ID) + .writeDisposition(WRITE_DISPOSITION) + .priority(PRIORITY) + .flattenResults(FLATTEN_RESULTS) + .userDefinedFunctions(USER_DEFINED_FUNCTIONS) + .dryRun(true) + .build(); + private static final JobInfo COPY_JOB = JobInfo.builder(COPY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(COPY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private static final JobInfo EXTRACT_JOB = JobInfo.builder(EXTRACT_CONFIGURATION) + .jobId(JOB_ID) + .statistics(EXTRACT_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private static final JobInfo LOAD_JOB = JobInfo.builder(LOAD_CONFIGURATION) + .jobId(JOB_ID) + .statistics(LOAD_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + private static final JobInfo QUERY_JOB = JobInfo.builder(QUERY_CONFIGURATION) + .jobId(JOB_ID) + .statistics(QUERY_JOB_STATISTICS) + .jobId(JOB_ID) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .status(JOB_STATUS) + .build(); + + + @Test + public void testToBuilder() { + compareJobInfo(COPY_JOB, COPY_JOB.toBuilder().build()); + compareJobInfo(EXTRACT_JOB, EXTRACT_JOB.toBuilder().build()); + 
compareJobInfo(LOAD_JOB, LOAD_JOB.toBuilder().build()); + compareJobInfo(QUERY_JOB, QUERY_JOB.toBuilder().build()); + JobInfo job = COPY_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(COPY_JOB, job); + job = EXTRACT_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(EXTRACT_JOB, job); + job = LOAD_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(LOAD_JOB, job); + job = QUERY_JOB.toBuilder() + .userEmail("newEmail") + .build(); + assertEquals("newEmail", job.userEmail()); + job = job.toBuilder().userEmail(EMAIL).build(); + compareJobInfo(QUERY_JOB, job); + } + + @Test + public void testOf() { + JobInfo job = JobInfo.of(COPY_CONFIGURATION); + assertEquals(COPY_CONFIGURATION, job.configuration()); + job = JobInfo.of(EXTRACT_CONFIGURATION); + assertEquals(EXTRACT_CONFIGURATION, job.configuration()); + job = JobInfo.of(LOAD_CONFIGURATION); + assertEquals(LOAD_CONFIGURATION, job.configuration()); + job = JobInfo.of(QUERY_CONFIGURATION); + assertEquals(QUERY_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, COPY_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(COPY_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, EXTRACT_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(EXTRACT_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, LOAD_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(LOAD_CONFIGURATION, job.configuration()); + job = JobInfo.of(JOB_ID, QUERY_CONFIGURATION); + assertEquals(JOB_ID, job.jobId()); + assertEquals(QUERY_CONFIGURATION, job.configuration()); + + } + + @Test + public void testToBuilderIncomplete() { + JobInfo job = JobInfo.of(COPY_CONFIGURATION); + compareJobInfo(job, job.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(ETAG, COPY_JOB.etag()); + assertEquals(ID, COPY_JOB.id()); + assertEquals(SELF_LINK, COPY_JOB.selfLink()); + assertEquals(EMAIL, COPY_JOB.userEmail()); + assertEquals(JOB_ID, COPY_JOB.jobId()); + assertEquals(JOB_STATUS, COPY_JOB.status()); + assertEquals(COPY_CONFIGURATION, COPY_JOB.configuration()); + assertEquals(COPY_JOB_STATISTICS, COPY_JOB.statistics()); + + assertEquals(ETAG, EXTRACT_JOB.etag()); + assertEquals(ID, EXTRACT_JOB.id()); + assertEquals(SELF_LINK, EXTRACT_JOB.selfLink()); + assertEquals(EMAIL, EXTRACT_JOB.userEmail()); + assertEquals(JOB_ID, EXTRACT_JOB.jobId()); + assertEquals(JOB_STATUS, EXTRACT_JOB.status()); + assertEquals(EXTRACT_CONFIGURATION, EXTRACT_JOB.configuration()); + assertEquals(EXTRACT_JOB_STATISTICS, EXTRACT_JOB.statistics()); + + assertEquals(ETAG, LOAD_JOB.etag()); + assertEquals(ID, LOAD_JOB.id()); + assertEquals(SELF_LINK, LOAD_JOB.selfLink()); + assertEquals(EMAIL, LOAD_JOB.userEmail()); + assertEquals(JOB_ID, LOAD_JOB.jobId()); + assertEquals(JOB_STATUS, LOAD_JOB.status()); + assertEquals(LOAD_CONFIGURATION, LOAD_JOB.configuration()); + assertEquals(LOAD_JOB_STATISTICS, LOAD_JOB.statistics()); + + assertEquals(ETAG, QUERY_JOB.etag()); + assertEquals(ID, QUERY_JOB.id()); + assertEquals(SELF_LINK, QUERY_JOB.selfLink()); + assertEquals(EMAIL, QUERY_JOB.userEmail()); + assertEquals(JOB_ID, QUERY_JOB.jobId()); + assertEquals(JOB_STATUS, 
QUERY_JOB.status()); + assertEquals(QUERY_CONFIGURATION, QUERY_JOB.configuration()); + assertEquals(QUERY_JOB_STATISTICS, QUERY_JOB.statistics()); + } + + @Test + public void testToPbAndFromPb() { + assertNotNull(COPY_JOB.toPb().getConfiguration().getCopy()); + assertNull(COPY_JOB.toPb().getConfiguration().getExtract()); + assertNull(COPY_JOB.toPb().getConfiguration().getLoad()); + assertNull(COPY_JOB.toPb().getConfiguration().getQuery()); + assertEquals(COPY_JOB_STATISTICS, JobStatistics.fromPb(COPY_JOB.statistics().toPb())); + compareJobInfo(COPY_JOB, JobInfo.fromPb(COPY_JOB.toPb())); + assertTrue(JobInfo.fromPb(COPY_JOB.toPb()).configuration() instanceof CopyJobConfiguration); + assertNull(EXTRACT_JOB.toPb().getConfiguration().getCopy()); + assertNotNull(EXTRACT_JOB.toPb().getConfiguration().getExtract()); + assertNull(EXTRACT_JOB.toPb().getConfiguration().getLoad()); + assertNull(EXTRACT_JOB.toPb().getConfiguration().getQuery()); + assertEquals(EXTRACT_JOB_STATISTICS, JobStatistics.fromPb(EXTRACT_JOB.statistics().toPb())); + compareJobInfo(EXTRACT_JOB, JobInfo.fromPb(EXTRACT_JOB.toPb())); + assertTrue( + JobInfo.fromPb(EXTRACT_JOB.toPb()).configuration() instanceof ExtractJobConfiguration); + assertTrue(JobInfo.fromPb(EXTRACT_JOB.toPb()).statistics() instanceof ExtractStatistics); + assertNull(LOAD_JOB.toPb().getConfiguration().getCopy()); + assertNull(LOAD_JOB.toPb().getConfiguration().getExtract()); + assertNotNull(LOAD_JOB.toPb().getConfiguration().getLoad()); + assertNull(LOAD_JOB.toPb().getConfiguration().getQuery()); + assertEquals(LOAD_JOB_STATISTICS, JobStatistics.fromPb(LOAD_JOB.statistics().toPb())); + compareJobInfo(LOAD_JOB, JobInfo.fromPb(LOAD_JOB.toPb())); + assertTrue(JobInfo.fromPb(LOAD_JOB.toPb()).configuration() instanceof LoadJobConfiguration); + assertTrue(JobInfo.fromPb(LOAD_JOB.toPb()).statistics() instanceof LoadStatistics); + assertNull(QUERY_JOB.toPb().getConfiguration().getCopy()); + assertNull(QUERY_JOB.toPb().getConfiguration().getExtract()); + assertNull(QUERY_JOB.toPb().getConfiguration().getLoad()); + assertNotNull(QUERY_JOB.toPb().getConfiguration().getQuery()); + assertEquals(QUERY_JOB_STATISTICS, JobStatistics.fromPb(QUERY_JOB.statistics().toPb())); + compareJobInfo(QUERY_JOB, JobInfo.fromPb(QUERY_JOB.toPb())); + assertTrue(JobInfo.fromPb(QUERY_JOB.toPb()).configuration() instanceof QueryJobConfiguration); + assertTrue(JobInfo.fromPb(QUERY_JOB.toPb()).statistics() instanceof QueryStatistics); + } + + @Test + public void testSetProjectId() { + CopyJobConfiguration copyConfiguration = COPY_JOB.setProjectId("p").configuration(); + assertEquals("p", copyConfiguration.destinationTable().project()); + for (TableId sourceTable : copyConfiguration.sourceTables()) { + assertEquals("p", sourceTable.project()); + } + ExtractJobConfiguration extractConfiguration = EXTRACT_JOB.setProjectId("p").configuration(); + assertEquals("p", extractConfiguration.sourceTable().project()); + LoadJobConfiguration loadConfiguration = LOAD_JOB.setProjectId("p").configuration(); + assertEquals("p", loadConfiguration.destinationTable().project()); + QueryJobConfiguration queryConfiguration = QUERY_JOB.setProjectId("p").configuration(); + assertEquals("p", queryConfiguration.defaultDataset().project()); + assertEquals("p", queryConfiguration.destinationTable().project()); + } + + private void compareJobInfo(JobInfo expected, JobInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + 
assertEquals(expected.etag(), value.etag());
+    assertEquals(expected.id(), value.id());
+    assertEquals(expected.jobId(), value.jobId());
+    assertEquals(expected.selfLink(), value.selfLink());
+    assertEquals(expected.status(), value.status());
+    assertEquals(expected.statistics(), value.statistics());
+    assertEquals(expected.userEmail(), value.userEmail());
+    assertEquals(expected.configuration(), value.configuration());
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java
new file mode 100644
index 000000000000..1ec67d034754
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.collect.ImmutableList;
+import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics;
+import com.google.gcloud.bigquery.JobStatistics.LoadStatistics;
+import com.google.gcloud.bigquery.JobStatistics.QueryStatistics;
+import com.google.gcloud.bigquery.QueryStage.QueryStep;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class JobStatisticsTest {
+
+  private static final Integer BILLING_TIER = 42;
+  private static final Boolean CACHE_HIT = true;
+  private static final Long TOTAL_BYTES_BILLED = 24L;
+  private static final Long TOTAL_BYTES_PROCESSED = 42L;
+  private static final Long INPUT_BYTES = 1L;
+  private static final Long INPUT_FILES = 2L;
+  private static final Long OUTPUT_BYTES = 3L;
+  private static final Long OUTPUT_ROWS = 4L;
+  private static final List<Long> FILE_COUNT = ImmutableList.of(1L, 2L, 3L);
+  private static final Long CREATION_TIME = 10L;
+  private static final Long END_TIME = 20L;
+  private static final Long START_TIME = 15L;
+  private static final ExtractStatistics EXTRACT_STATISTICS = ExtractStatistics.builder()
+      .creationTime(CREATION_TIME)
+      .endTime(END_TIME)
+      .startTime(START_TIME)
+      .destinationUriFileCounts(FILE_COUNT)
+      .build();
+  private static final LoadStatistics LOAD_STATISTICS = LoadStatistics.builder()
+      .creationTime(CREATION_TIME)
+      .endTime(END_TIME)
+      .startTime(START_TIME)
+      .inputBytes(INPUT_BYTES)
+      .inputFiles(INPUT_FILES)
+      .outputBytes(OUTPUT_BYTES)
+      .outputRows(OUTPUT_ROWS)
+      .build();
+  private static final LoadStatistics LOAD_STATISTICS_INCOMPLETE = LoadStatistics.builder()
+      .creationTime(CREATION_TIME)
+      .endTime(END_TIME)
+      .startTime(START_TIME)
+      .inputBytes(INPUT_BYTES)
+      .inputFiles(INPUT_FILES)
+      .build();
+  private static final List<String> SUBSTEPS1 = ImmutableList.of("substep1", "substep2");
+  private static final List<String> SUBSTEPS2 = ImmutableList.of("substep3", "substep4");
+  private static final QueryStep QUERY_STEP1 = new QueryStep("KIND", SUBSTEPS1);
+  private static final QueryStep QUERY_STEP2 = new QueryStep("KIND", SUBSTEPS2);
+  private static final QueryStage QUERY_STAGE = QueryStage.builder()
+      .computeRatioAvg(1.1)
+      .computeRatioMax(2.2)
+      .id(42L)
+      .name("stage")
+      .readRatioAvg(3.3)
+      .readRatioMax(4.4)
+      .recordsRead(5L)
+      .recordsWritten(6L)
+      .steps(ImmutableList.of(QUERY_STEP1, QUERY_STEP2))
+      .waitRatioAvg(7.7)
+      .waitRatioMax(8.8)
+      .writeRatioAvg(9.9)
+      .writeRatioMax(10.10)
+      .build();
+  private static final List<QueryStage> QUERY_PLAN = ImmutableList.of(QUERY_STAGE);
+  private static final QueryStatistics QUERY_STATISTICS = QueryStatistics.builder()
+      .creationTime(CREATION_TIME)
+      .endTime(END_TIME)
+      .startTime(START_TIME)
+      .billingTier(BILLING_TIER)
+      .cacheHit(CACHE_HIT)
+      .totalBytesBilled(TOTAL_BYTES_BILLED)
+      .totalBytesProcessed(TOTAL_BYTES_PROCESSED)
+      .queryPlan(QUERY_PLAN)
+      .build();
+  private static final QueryStatistics QUERY_STATISTICS_INCOMPLETE = QueryStatistics.builder()
+      .creationTime(CREATION_TIME)
+      .endTime(END_TIME)
+      .startTime(START_TIME)
+      .billingTier(BILLING_TIER)
+      .cacheHit(CACHE_HIT)
+      .build();
+  private static final JobStatistics STATISTICS = JobStatistics.builder()
+      .creationTime(CREATION_TIME)
+      .endTime(END_TIME)
+      .startTime(START_TIME)
+      .build();
+
+  @Test
+  public void testBuilder() {
+    assertEquals(CREATION_TIME, STATISTICS.creationTime());
+    assertEquals(START_TIME, STATISTICS.startTime());
+    assertEquals(END_TIME, STATISTICS.endTime());
+
+    assertEquals(CREATION_TIME, EXTRACT_STATISTICS.creationTime());
+    assertEquals(START_TIME, EXTRACT_STATISTICS.startTime());
+    assertEquals(END_TIME, EXTRACT_STATISTICS.endTime());
+    assertEquals(FILE_COUNT, EXTRACT_STATISTICS.destinationUriFileCounts());
+
+    assertEquals(CREATION_TIME, LOAD_STATISTICS.creationTime());
+    assertEquals(START_TIME, LOAD_STATISTICS.startTime());
+    assertEquals(END_TIME, LOAD_STATISTICS.endTime());
+    assertEquals(INPUT_BYTES, LOAD_STATISTICS.inputBytes());
+    assertEquals(INPUT_FILES, LOAD_STATISTICS.inputFiles());
+    assertEquals(OUTPUT_BYTES, LOAD_STATISTICS.outputBytes());
+    assertEquals(OUTPUT_ROWS, LOAD_STATISTICS.outputRows());
+
+    assertEquals(CREATION_TIME, QUERY_STATISTICS.creationTime());
+    assertEquals(START_TIME, QUERY_STATISTICS.startTime());
+    assertEquals(END_TIME, QUERY_STATISTICS.endTime());
+    assertEquals(BILLING_TIER, QUERY_STATISTICS.billingTier());
+    assertEquals(CACHE_HIT, QUERY_STATISTICS.cacheHit());
+    assertEquals(TOTAL_BYTES_BILLED, QUERY_STATISTICS.totalBytesBilled());
+    assertEquals(TOTAL_BYTES_PROCESSED, QUERY_STATISTICS.totalBytesProcessed());
+    assertEquals(QUERY_PLAN, QUERY_STATISTICS.queryPlan());
+
+    assertEquals(CREATION_TIME, LOAD_STATISTICS_INCOMPLETE.creationTime());
+    assertEquals(START_TIME, LOAD_STATISTICS_INCOMPLETE.startTime());
+    assertEquals(END_TIME, LOAD_STATISTICS_INCOMPLETE.endTime());
+    assertEquals(INPUT_BYTES, LOAD_STATISTICS_INCOMPLETE.inputBytes());
+    assertEquals(INPUT_FILES, LOAD_STATISTICS_INCOMPLETE.inputFiles());
+    assertEquals(null, LOAD_STATISTICS_INCOMPLETE.outputBytes());
+    assertEquals(null, LOAD_STATISTICS_INCOMPLETE.outputRows());
+
+    assertEquals(CREATION_TIME, QUERY_STATISTICS_INCOMPLETE.creationTime());
+    assertEquals(START_TIME, QUERY_STATISTICS_INCOMPLETE.startTime());
+    assertEquals(END_TIME, QUERY_STATISTICS_INCOMPLETE.endTime());
+    assertEquals(BILLING_TIER, QUERY_STATISTICS_INCOMPLETE.billingTier());
+    assertEquals(CACHE_HIT, QUERY_STATISTICS_INCOMPLETE.cacheHit());
+    assertEquals(null,
QUERY_STATISTICS_INCOMPLETE.totalBytesBilled()); + assertEquals(null, QUERY_STATISTICS_INCOMPLETE.totalBytesProcessed()); + assertEquals(null, QUERY_STATISTICS_INCOMPLETE.queryPlan()); + } + + @Test + public void testToPbAndFromPb() { + compareExtractStatistics(EXTRACT_STATISTICS, + ExtractStatistics.fromPb(EXTRACT_STATISTICS.toPb())); + compareLoadStatistics(LOAD_STATISTICS, LoadStatistics.fromPb(LOAD_STATISTICS.toPb())); + compareQueryStatistics(QUERY_STATISTICS, QueryStatistics.fromPb(QUERY_STATISTICS.toPb())); + compareStatistics(STATISTICS, JobStatistics.fromPb(STATISTICS.toPb())); + + compareLoadStatistics(LOAD_STATISTICS_INCOMPLETE, + LoadStatistics.fromPb(LOAD_STATISTICS_INCOMPLETE.toPb())); + compareQueryStatistics(QUERY_STATISTICS_INCOMPLETE, + QueryStatistics.fromPb(QUERY_STATISTICS_INCOMPLETE.toPb())); + } + + private void compareExtractStatistics(ExtractStatistics expected, ExtractStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.destinationUriFileCounts(), value.destinationUriFileCounts()); + } + + private void compareLoadStatistics(LoadStatistics expected, LoadStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.inputBytes(), value.inputBytes()); + assertEquals(expected.inputFiles(), value.inputFiles()); + assertEquals(expected.outputBytes(), value.outputBytes()); + assertEquals(expected.outputRows(), value.outputRows()); + } + + private void compareQueryStatistics(QueryStatistics expected, QueryStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.billingTier(), value.billingTier()); + assertEquals(expected.cacheHit(), value.cacheHit()); + assertEquals(expected.totalBytesBilled(), value.totalBytesBilled()); + assertEquals(expected.totalBytesProcessed(), value.totalBytesProcessed()); + assertEquals(expected.queryPlan(), value.queryPlan()); + } + + private void compareStatistics(JobStatistics expected, JobStatistics value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.creationTime(), value.creationTime()); + assertEquals(expected.endTime(), value.endTime()); + assertEquals(expected.startTime(), value.startTime()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java new file mode 100644 index 000000000000..c44386a3e72c --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatusTest.java @@ -0,0 +1,67 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class JobStatusTest {
+
+  private static final JobStatus.State STATE = JobStatus.State.DONE;
+  private static final BigQueryError ERROR =
+      new BigQueryError("reason", "location", "message", "debugInfo");
+  private static final List<BigQueryError> ALL_ERRORS = ImmutableList.of(
+      new BigQueryError("reason1", "location1", "message1", "debugInfo1"),
+      new BigQueryError("reason2", "location2", "message2", "debugInfo2"));
+  private static final JobStatus JOB_STATUS = new JobStatus(STATE, ERROR, ALL_ERRORS);
+  private static final JobStatus JOB_STATUS_INCOMPLETE1 = new JobStatus(STATE, ERROR, null);
+  private static final JobStatus JOB_STATUS_INCOMPLETE2 = new JobStatus(STATE, null, null);
+
+  @Test
+  public void testConstructor() {
+    assertEquals(STATE, JOB_STATUS.state());
+    assertEquals(ERROR, JOB_STATUS.error());
+    assertEquals(ALL_ERRORS, JOB_STATUS.executionErrors());
+
+    assertEquals(STATE, JOB_STATUS_INCOMPLETE1.state());
+    assertEquals(ERROR, JOB_STATUS_INCOMPLETE1.error());
+    assertEquals(null, JOB_STATUS_INCOMPLETE1.executionErrors());
+
+    assertEquals(STATE, JOB_STATUS_INCOMPLETE2.state());
+    assertEquals(null, JOB_STATUS_INCOMPLETE2.error());
+    assertEquals(null, JOB_STATUS_INCOMPLETE2.executionErrors());
+  }
+
+  @Test
+  public void testToPbAndFromPb() {
+    compareStatus(JOB_STATUS, JobStatus.fromPb(JOB_STATUS.toPb()));
+    compareStatus(JOB_STATUS_INCOMPLETE1, JobStatus.fromPb(JOB_STATUS_INCOMPLETE1.toPb()));
+    compareStatus(JOB_STATUS_INCOMPLETE2, JobStatus.fromPb(JOB_STATUS_INCOMPLETE2.toPb()));
+  }
+
+  private void compareStatus(JobStatus expected, JobStatus value) {
+    assertEquals(expected, value);
+    assertEquals(expected.state(), value.state());
+    assertEquals(expected.error(), value.error());
+    assertEquals(expected.executionErrors(), value.executionErrors());
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java
new file mode 100644
index 000000000000..90b602d978e0
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobTest.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class JobTest { + + private static final JobId JOB_ID = JobId.of("dataset", "job"); + private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); + private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); + private static final JobInfo JOB_INFO = + JobInfo.of(JOB_ID, CopyJobConfiguration.of(TABLE_ID1, TABLE_ID2)); + + private BigQuery bigquery; + private Job job; + + @Before + public void setUp() throws Exception { + bigquery = createStrictMock(BigQuery.class); + job = new Job(bigquery, JOB_INFO); + } + + @After + public void tearDown() throws Exception { + verify(bigquery); + } + + @Test + public void testInfo() throws Exception { + assertEquals(JOB_INFO, job.info()); + replay(bigquery); + } + + @Test + public void testBigQuery() throws Exception { + assertSame(bigquery, job.bigquery()); + replay(bigquery); + } + + @Test + public void testExists_True() throws Exception { + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(JOB_INFO); + replay(bigquery); + assertTrue(job.exists()); + } + + @Test + public void testExists_False() throws Exception { + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(null); + replay(bigquery); + assertFalse(job.exists()); + } + + @Test + public void testIsDone_True() throws Exception { + JobStatus status = createStrictMock(JobStatus.class); + expect(status.state()).andReturn(JobStatus.State.DONE); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)) + .andReturn(JOB_INFO.toBuilder().status(status).build()); + replay(status, bigquery); + assertTrue(job.isDone()); + verify(status); + } + + @Test + public void testIsDone_False() throws Exception { + JobStatus status = createStrictMock(JobStatus.class); + expect(status.state()).andReturn(JobStatus.State.RUNNING); + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)) + .andReturn(JOB_INFO.toBuilder().status(status).build()); + replay(status, bigquery); + assertFalse(job.isDone()); + verify(status); + } + + @Test + public void testIsDone_NotExists() throws Exception { + BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + expect(bigquery.getJob(JOB_INFO.jobId(), expectedOptions)).andReturn(null); + replay(bigquery); + assertFalse(job.isDone()); + } + + @Test + public void testReload() throws Exception { + JobInfo updatedInfo = JOB_INFO.toBuilder().etag("etag").build(); + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(updatedInfo); + replay(bigquery); + Job updatedJob = job.reload(); + assertSame(bigquery, updatedJob.bigquery()); + assertEquals(updatedInfo, 
updatedJob.info()); + } + + @Test + public void testReloadNull() throws Exception { + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(null); + replay(bigquery); + assertNull(job.reload()); + } + + @Test + public void testReloadWithOptions() throws Exception { + JobInfo updatedInfo = JOB_INFO.toBuilder().etag("etag").build(); + expect(bigquery.getJob(JOB_INFO.jobId().job(), BigQuery.JobOption.fields())) + .andReturn(updatedInfo); + replay(bigquery); + Job updatedJob = job.reload(BigQuery.JobOption.fields()); + assertSame(bigquery, updatedJob.bigquery()); + assertEquals(updatedInfo, updatedJob.info()); + } + + @Test + public void testCancel() throws Exception { + expect(bigquery.cancel(JOB_INFO.jobId())).andReturn(true); + replay(bigquery); + assertTrue(job.cancel()); + } + + @Test + public void testGet() throws Exception { + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(JOB_INFO); + replay(bigquery); + Job loadedJob = Job.get(bigquery, JOB_INFO.jobId().job()); + assertNotNull(loadedJob); + assertEquals(JOB_INFO, loadedJob.info()); + } + + @Test + public void testGetNull() throws Exception { + expect(bigquery.getJob(JOB_INFO.jobId().job())).andReturn(null); + replay(bigquery); + assertNull(Job.get(bigquery, JOB_INFO.jobId().job())); + } + + @Test + public void testGetWithOptions() throws Exception { + expect(bigquery.getJob(JOB_INFO.jobId().job(), BigQuery.JobOption.fields())) + .andReturn(JOB_INFO); + replay(bigquery); + Job loadedJob = Job.get(bigquery, JOB_INFO.jobId().job(), BigQuery.JobOption.fields()); + assertNotNull(loadedJob); + assertEquals(JOB_INFO, loadedJob.info()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java new file mode 100644 index 000000000000..88ae6a4fc1b8 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/LoadJobConfigurationTest.java @@ -0,0 +1,140 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.collect.ImmutableList;
+import com.google.gcloud.bigquery.JobInfo.CreateDisposition;
+import com.google.gcloud.bigquery.JobInfo.WriteDisposition;
+
+import org.junit.Test;
+
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+public class LoadJobConfigurationTest {
+
+  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder()
+      .allowJaggedRows(true)
+      .allowQuotedNewLines(false)
+      .encoding(StandardCharsets.UTF_8)
+      .build();
+  private static final TableId TABLE_ID = TableId.of("dataset", "table");
+  private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED;
+  private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND;
+  private static final Integer MAX_BAD_RECORDS = 42;
+  private static final String FORMAT = "CSV";
+  private static final Boolean IGNORE_UNKNOWN_VALUES = true;
+  private static final List<String> PROJECTION_FIELDS = ImmutableList.of("field1", "field2");
+  private static final Field FIELD_SCHEMA = Field.builder("IntegerField", Field.Type.integer())
+      .mode(Field.Mode.REQUIRED)
+      .description("FieldDescription")
+      .build();
+  private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2");
+  private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA);
+  private static final LoadJobConfiguration LOAD_CONFIGURATION =
+      LoadJobConfiguration.builder(TABLE_ID, SOURCE_URIS)
+          .createDisposition(CREATE_DISPOSITION)
+          .writeDisposition(WRITE_DISPOSITION)
+          .formatOptions(CSV_OPTIONS)
+          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
+          .maxBadRecords(MAX_BAD_RECORDS)
+          .projectionFields(PROJECTION_FIELDS)
+          .schema(TABLE_SCHEMA)
+          .build();
+
+  @Test
+  public void testToBuilder() {
+    compareLoadJobConfiguration(LOAD_CONFIGURATION, LOAD_CONFIGURATION.toBuilder().build());
+    LoadJobConfiguration configuration = LOAD_CONFIGURATION.toBuilder()
+        .destinationTable(TableId.of("dataset", "newTable"))
+        .build();
+    assertEquals("newTable", configuration.destinationTable().table());
+    configuration = configuration.toBuilder().destinationTable(TABLE_ID).build();
+    compareLoadJobConfiguration(LOAD_CONFIGURATION, configuration);
+  }
+
+  @Test
+  public void testOf() {
+    LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS);
+    assertEquals(TABLE_ID, configuration.destinationTable());
+    assertEquals(SOURCE_URIS, configuration.sourceUris());
+    configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS, CSV_OPTIONS);
+    assertEquals(TABLE_ID, configuration.destinationTable());
+    assertEquals(FORMAT, configuration.format());
+    assertEquals(CSV_OPTIONS, configuration.csvOptions());
+    assertEquals(SOURCE_URIS, configuration.sourceUris());
+    configuration = LoadJobConfiguration.of(TABLE_ID, "uri1");
+    assertEquals(TABLE_ID, configuration.destinationTable());
+    assertEquals(ImmutableList.of("uri1"), configuration.sourceUris());
+    configuration = LoadJobConfiguration.of(TABLE_ID, "uri1", CSV_OPTIONS);
+    assertEquals(TABLE_ID, configuration.destinationTable());
+    assertEquals(FORMAT, configuration.format());
+    assertEquals(CSV_OPTIONS, configuration.csvOptions());
+    assertEquals(ImmutableList.of("uri1"), configuration.sourceUris());
+  }
+
+  @Test
+  public void testToBuilderIncomplete() {
+    LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS);
+    compareLoadJobConfiguration(configuration, configuration.toBuilder().build());
+  }
+
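
For context, a configuration like `LOAD_CONFIGURATION` above maps onto a load request along these lines; a minimal sketch in which the bucket URI, the IDs, and the `bigquery.create` call are illustrative assumptions rather than part of this diff:

```java
// Sketch only: loads a CSV object from Cloud Storage into a BigQuery table,
// creating the table if needed. URIs and IDs are placeholders.
static JobInfo submitLoadJob(BigQuery bigquery) {
  LoadJobConfiguration configuration = LoadJobConfiguration
      .builder(TableId.of("myDataset", "myTable"),
          ImmutableList.of("gs://my-bucket/my-file.csv"))
      .formatOptions(CsvOptions.builder().allowJaggedRows(true).build())
      .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
      .build();
  return bigquery.create(JobInfo.of(configuration));
}
```
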
@Test + public void testBuilder() { + assertEquals(TABLE_ID, LOAD_CONFIGURATION.destinationTable()); + assertEquals(CREATE_DISPOSITION, LOAD_CONFIGURATION.createDisposition()); + assertEquals(WRITE_DISPOSITION, LOAD_CONFIGURATION.writeDisposition()); + assertEquals(CSV_OPTIONS, LOAD_CONFIGURATION.csvOptions()); + assertEquals(FORMAT, LOAD_CONFIGURATION.format()); + assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_CONFIGURATION.ignoreUnknownValues()); + assertEquals(MAX_BAD_RECORDS, LOAD_CONFIGURATION.maxBadRecords()); + assertEquals(PROJECTION_FIELDS, LOAD_CONFIGURATION.projectionFields()); + assertEquals(TABLE_SCHEMA, LOAD_CONFIGURATION.schema()); + } + + @Test + public void testToPbAndFromPb() { + compareLoadJobConfiguration(LOAD_CONFIGURATION, + LoadJobConfiguration.fromPb(LOAD_CONFIGURATION.toPb())); + LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); + compareLoadJobConfiguration(configuration, LoadJobConfiguration.fromPb(configuration.toPb())); + } + + @Test + public void testSetProjectId() { + LoadConfiguration configuration = LOAD_CONFIGURATION.setProjectId("p"); + assertEquals("p", configuration.destinationTable().project()); + } + + private void compareLoadJobConfiguration(LoadJobConfiguration expected, + LoadJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + assertEquals(expected.csvOptions(), value.csvOptions()); + assertEquals(expected.format(), value.format()); + assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues()); + assertEquals(expected.maxBadRecords(), value.maxBadRecords()); + assertEquals(expected.projectionFields(), value.projectionFields()); + assertEquals(expected.schema(), value.schema()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java new file mode 100644 index 000000000000..225fc284b203 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/OptionTest.java @@ -0,0 +1,38 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.gcloud.spi.BigQueryRpc;
+
+import org.junit.Test;
+
+public class OptionTest {
+
+  @Test
+  public void testOption() {
+    Option option = new Option(BigQueryRpc.Option.PAGE_TOKEN, "token");
+    assertEquals(BigQueryRpc.Option.PAGE_TOKEN, option.rpcOption());
+    assertEquals("token", option.value());
+  }
+
+  @Test(expected = NullPointerException.class)
+  public void testNullRpcOption() {
+    new Option(null, "token");
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java
new file mode 100644
index 000000000000..69b2f992fe22
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobConfigurationTest.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.gcloud.bigquery.JobInfo.CreateDisposition;
+import com.google.gcloud.bigquery.JobInfo.WriteDisposition;
+import com.google.gcloud.bigquery.QueryJobConfiguration.Priority;
+
+import org.junit.Test;
+
+import java.util.List;
+import java.util.Map;
+
+public class QueryJobConfigurationTest {
+
+  private static final String QUERY = "BigQuery SQL";
+  private static final DatasetId DATASET_ID = DatasetId.of("dataset");
+  private static final TableId TABLE_ID = TableId.of("dataset", "table");
+  private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2");
+  private static final Field FIELD_SCHEMA1 = Field.builder("StringField", Field.Type.string())
+      .mode(Field.Mode.NULLABLE)
+      .description("FieldDescription1")
+      .build();
+  private static final Field FIELD_SCHEMA2 = Field.builder("IntegerField", Field.Type.integer())
+      .mode(Field.Mode.REPEATED)
+      .description("FieldDescription2")
+      .build();
+  private static final Field FIELD_SCHEMA3 =
+      Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2))
+          .mode(Field.Mode.REQUIRED)
+          .description("FieldDescription3")
+          .build();
+  private static final Schema TABLE_SCHEMA =
+      Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3);
+  private static final Integer MAX_BAD_RECORDS = 42;
+  private static final Boolean IGNORE_UNKNOWN_VALUES = true;
+  private static final String COMPRESSION = "GZIP";
+  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build();
+  private static final ExternalDataConfiguration TABLE_CONFIGURATION = ExternalDataConfiguration
+      .builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS)
+      .compression(COMPRESSION)
+      .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
+      .maxBadRecords(MAX_BAD_RECORDS)
+      .build();
+  private static final Map<String, ExternalDataConfiguration> TABLE_DEFINITIONS =
+      ImmutableMap.of("tableName", TABLE_CONFIGURATION);
+  private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED;
+  private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND;
+  private static final Priority PRIORITY = Priority.BATCH;
+  private static final boolean ALLOW_LARGE_RESULTS = true;
+  private static final boolean USE_QUERY_CACHE = false;
+  private static final boolean FLATTEN_RESULTS = true;
+  private static final List<UserDefinedFunction> USER_DEFINED_FUNCTIONS = ImmutableList.of(
+      UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI"));
+  private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION =
+      QueryJobConfiguration.builder(QUERY)
+          .useQueryCache(USE_QUERY_CACHE)
+          .tableDefinitions(TABLE_DEFINITIONS)
+          .allowLargeResults(ALLOW_LARGE_RESULTS)
+          .createDisposition(CREATE_DISPOSITION)
+          .defaultDataset(DATASET_ID)
+          .destinationTable(TABLE_ID)
+          .writeDisposition(WRITE_DISPOSITION)
+          .priority(PRIORITY)
+          .flattenResults(FLATTEN_RESULTS)
+          .userDefinedFunctions(USER_DEFINED_FUNCTIONS)
+          .dryRun(true)
+          .build();
+
+  @Test
+  public void testToBuilder() {
+    compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION,
+        QUERY_JOB_CONFIGURATION.toBuilder().build());
+    QueryJobConfiguration job = QUERY_JOB_CONFIGURATION.toBuilder()
+        .query("New BigQuery SQL")
+        .build();
+    assertEquals("New BigQuery SQL", job.query());
+    job = job.toBuilder().query(QUERY).build();
+    compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION, job);
+  }
+
+  @Test
+  public void testOf() {
+    QueryJobConfiguration job = QueryJobConfiguration.of(QUERY);
+    assertEquals(QUERY, job.query());
+  }
+
+  @Test
+  public void testToBuilderIncomplete() {
+    QueryJobConfiguration job = QueryJobConfiguration.of(QUERY);
+    compareQueryJobConfiguration(job, job.toBuilder().build());
+  }
+
+  @Test
+  public void testBuilder() {
+    assertEquals(ALLOW_LARGE_RESULTS, QUERY_JOB_CONFIGURATION.allowLargeResults());
+    assertEquals(CREATE_DISPOSITION, QUERY_JOB_CONFIGURATION.createDisposition());
+    assertEquals(DATASET_ID, QUERY_JOB_CONFIGURATION.defaultDataset());
+    assertEquals(TABLE_ID, QUERY_JOB_CONFIGURATION.destinationTable());
+    assertEquals(FLATTEN_RESULTS, QUERY_JOB_CONFIGURATION.flattenResults());
+    assertEquals(PRIORITY, QUERY_JOB_CONFIGURATION.priority());
+    assertEquals(QUERY, QUERY_JOB_CONFIGURATION.query());
+    assertEquals(TABLE_DEFINITIONS, QUERY_JOB_CONFIGURATION.tableDefinitions());
+    assertEquals(USE_QUERY_CACHE, QUERY_JOB_CONFIGURATION.useQueryCache());
+    assertEquals(USER_DEFINED_FUNCTIONS, QUERY_JOB_CONFIGURATION.userDefinedFunctions());
+    assertEquals(WRITE_DISPOSITION, QUERY_JOB_CONFIGURATION.writeDisposition());
+    assertTrue(QUERY_JOB_CONFIGURATION.dryRun());
+  }
+
+  @Test
+  public void testToPbAndFromPb() {
+    assertNotNull(QUERY_JOB_CONFIGURATION.toPb().getQuery());
+    assertNull(QUERY_JOB_CONFIGURATION.toPb().getExtract());
+    assertNull(QUERY_JOB_CONFIGURATION.toPb().getCopy());
+    assertNull(QUERY_JOB_CONFIGURATION.toPb().getLoad());
+    compareQueryJobConfiguration(QUERY_JOB_CONFIGURATION,
+        QueryJobConfiguration.fromPb(QUERY_JOB_CONFIGURATION.toPb()));
+    QueryJobConfiguration job = QueryJobConfiguration.of(QUERY);
+    compareQueryJobConfiguration(job, QueryJobConfiguration.fromPb(job.toPb()));
+  }
+
+  @Test
+  public void testSetProjectId() {
+    QueryJobConfiguration configuration =
QUERY_JOB_CONFIGURATION.setProjectId("p"); + assertEquals("p", configuration.defaultDataset().project()); + assertEquals("p", configuration.destinationTable().project()); + } + + private void compareQueryJobConfiguration(QueryJobConfiguration expected, + QueryJobConfiguration value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.dryRun(), value.dryRun()); + assertEquals(expected.allowLargeResults(), value.allowLargeResults()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.defaultDataset(), value.defaultDataset()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.flattenResults(), value.flattenResults()); + assertEquals(expected.priority(), value.priority()); + assertEquals(expected.query(), value.query()); + assertEquals(expected.tableDefinitions(), value.tableDefinitions()); + assertEquals(expected.useQueryCache(), value.useQueryCache()); + assertEquals(expected.userDefinedFunctions(), value.userDefinedFunctions()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java new file mode 100644 index 000000000000..370b4d614cbf --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java @@ -0,0 +1,108 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class QueryRequestTest { + + private static final String QUERY = "BigQuery SQL"; + private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final Boolean USE_QUERY_CACHE = true; + private static final Boolean DRY_RUN = false; + private static final Long MAX_RESULTS = 42L; + private static final Long MAX_WAIT_TIME = 42000L; + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder(QUERY) + .useQueryCache(USE_QUERY_CACHE) + .defaultDataset(DATASET_ID) + .dryRun(DRY_RUN) + .maxResults(MAX_RESULTS) + .maxWaitTime(MAX_WAIT_TIME) + .build(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void testToBuilder() { + compareQueryRequest(QUERY_REQUEST, QUERY_REQUEST.toBuilder().build()); + QueryRequest queryRequest = QUERY_REQUEST.toBuilder() + .query("New BigQuery SQL") + .build(); + assertEquals("New BigQuery SQL", queryRequest.query()); + queryRequest = queryRequest.toBuilder().query(QUERY).build(); + compareQueryRequest(QUERY_REQUEST, queryRequest); + } + + @Test + public void testToBuilderIncomplete() { + QueryRequest queryRequest = QueryRequest.of(QUERY); + compareQueryRequest(queryRequest, queryRequest.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(QUERY, QUERY_REQUEST.query()); + assertEquals(USE_QUERY_CACHE, QUERY_REQUEST.useQueryCache()); + assertEquals(DATASET_ID, QUERY_REQUEST.defaultDataset()); + assertEquals(DRY_RUN, QUERY_REQUEST.dryRun()); + assertEquals(MAX_RESULTS, QUERY_REQUEST.maxResults()); + assertEquals(MAX_WAIT_TIME, QUERY_REQUEST.maxWaitTime()); + thrown.expect(NullPointerException.class); + QueryRequest.builder(null); + } + + @Test + public void testOf() { + QueryRequest request = QueryRequest.of(QUERY); + assertEquals(QUERY, request.query()); + assertNull(request.useQueryCache()); + assertNull(request.defaultDataset()); + assertNull(request.dryRun()); + assertNull(request.maxResults()); + assertNull(request.maxWaitTime()); + thrown.expect(NullPointerException.class); + QueryRequest.of(null); + } + + @Test + public void testToPbAndFromPb() { + compareQueryRequest(QUERY_REQUEST, QueryRequest.fromPb(QUERY_REQUEST.toPb())); + QueryRequest queryRequest = QueryRequest.of(QUERY); + compareQueryRequest(queryRequest, QueryRequest.fromPb(queryRequest.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals("p", QUERY_REQUEST.setProjectId("p").defaultDataset().project()); + } + + private void compareQueryRequest(QueryRequest expected, QueryRequest value) { + assertEquals(expected, value); + assertEquals(expected.query(), value.query()); + assertEquals(expected.useQueryCache(), value.useQueryCache()); + assertEquals(expected.defaultDataset(), value.defaultDataset()); + assertEquals(expected.dryRun(), value.dryRun()); + assertEquals(expected.maxResults(), value.maxResults()); + assertEquals(expected.maxWaitTime(), value.maxWaitTime()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java new file mode 100644 index 000000000000..08e885c8b3aa --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java @@ -0,0 +1,107 @@ +/* + * Copyright 
2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class QueryResponseTest {
+
+  private static final String ETAG = "etag";
+  private static final Field FIELD_SCHEMA1 = Field.builder("StringField", Field.Type.string())
+      .mode(Field.Mode.NULLABLE)
+      .description("FieldDescription1")
+      .build();
+  private static final Schema SCHEMA = Schema.of(FIELD_SCHEMA1);
+  private static final JobId JOB_ID = JobId.of("project", "job");
+  private static final Long TOTAL_ROWS = 42L;
+  private static final QueryResult.QueryResultsPageFetcher FETCHER =
+      new QueryResult.QueryResultsPageFetcher() {
+        @Override
+        public QueryResult nextPage() {
+          return null;
+        }
+      };
+  private static final Long TOTAL_BYTES_PROCESSED = 4200L;
+  private static final Boolean JOB_COMPLETE = true;
+  private static final List<BigQueryError> ERRORS = ImmutableList.of(
+      new BigQueryError("reason1", "location1", "message1", "debugInfo1"),
+      new BigQueryError("reason2", "location2", "message2", "debugInfo2")
+  );
+  private static final Boolean CACHE_HIT = false;
+  private static final QueryResult QUERY_RESULT = QueryResult.builder()
+      .schema(SCHEMA)
+      .totalRows(TOTAL_ROWS)
+      .totalBytesProcessed(TOTAL_BYTES_PROCESSED)
+      .cursor("cursor")
+      .pageFetcher(FETCHER)
+      .results(ImmutableList.<List<FieldValue>>of())
+      .cacheHit(CACHE_HIT)
+      .build();
+  private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder()
+      .etag(ETAG)
+      .jobId(JOB_ID)
+      .jobCompleted(JOB_COMPLETE)
+      .executionErrors(ERRORS)
+      .result(QUERY_RESULT)
+      .build();
+
+  @Test
+  public void testBuilder() {
+    assertEquals(ETAG, QUERY_RESPONSE.etag());
+    assertEquals(QUERY_RESULT, QUERY_RESPONSE.result());
+    assertEquals(JOB_ID, QUERY_RESPONSE.jobId());
+    assertEquals(JOB_COMPLETE, QUERY_RESPONSE.jobCompleted());
+    assertEquals(ERRORS, QUERY_RESPONSE.executionErrors());
+    assertTrue(QUERY_RESPONSE.hasErrors());
+  }
+
+  @Test
+  public void testBuilderIncomplete() {
+    QueryResponse queryResponse = QueryResponse.builder().jobCompleted(false).build();
+    assertNull(queryResponse.etag());
+    assertNull(queryResponse.result());
+    assertNull(queryResponse.jobId());
+    assertFalse(queryResponse.jobCompleted());
+    assertEquals(ImmutableList.of(), queryResponse.executionErrors());
+    assertFalse(queryResponse.hasErrors());
+  }
+
+  @Test
+  public void testEquals() {
+    compareQueryResponse(QUERY_RESPONSE, QUERY_RESPONSE);
+  }
+
+  private void compareQueryResponse(QueryResponse expected, QueryResponse value) {
+    assertEquals(expected, value);
+    assertEquals(expected.etag(), value.etag());
+    assertEquals(expected.result(), value.result());
+    assertEquals(expected.jobId(), value.jobId());
+    assertEquals(expected.jobCompleted(), value.jobCompleted());
+    assertEquals(expected.executionErrors(), value.executionErrors());
+    assertEquals(expected.hasErrors(), value.hasErrors());
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java
new file mode 100644
index 000000000000..b6810ed93143
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.common.collect.ImmutableList;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class QueryResultTest {
+
+  private static final String CURSOR = "cursor";
+  private static final Field FIELD_SCHEMA1 = Field.builder("StringField", Field.Type.string())
+      .mode(Field.Mode.NULLABLE)
+      .description("FieldDescription1")
+      .build();
+  private static final Schema SCHEMA = Schema.of(FIELD_SCHEMA1);
+  private static final long TOTAL_ROWS = 42L;
+  private static final QueryResult.QueryResultsPageFetcher FETCHER =
+      new QueryResult.QueryResultsPageFetcher() {
+        @Override
+        public QueryResult nextPage() {
+          return null;
+        }
+      };
+  private static final long TOTAL_BYTES_PROCESSED = 4200L;
+  private static final boolean CACHE_HIT = false;
+  private static final QueryResult QUERY_RESULT = QueryResult.builder()
+      .schema(SCHEMA)
+      .totalRows(TOTAL_ROWS)
+      .totalBytesProcessed(TOTAL_BYTES_PROCESSED)
+      .cursor(CURSOR)
+      .pageFetcher(FETCHER)
+      .results(ImmutableList.<List<FieldValue>>of())
+      .cacheHit(CACHE_HIT)
+      .build();
+  private static final QueryResult QUERY_RESULT_INCOMPLETE = QueryResult.builder()
+      .totalBytesProcessed(TOTAL_BYTES_PROCESSED)
+      .build();
+
+  @Test
+  public void testBuilder() {
+    assertEquals(SCHEMA, QUERY_RESULT.schema());
+    assertEquals(TOTAL_ROWS, QUERY_RESULT.totalRows());
+    assertEquals(TOTAL_BYTES_PROCESSED, QUERY_RESULT.totalBytesProcessed());
+    assertEquals(CACHE_HIT, QUERY_RESULT.cacheHit());
+    assertEquals(CURSOR, QUERY_RESULT.nextPageCursor());
+    assertEquals(null, QUERY_RESULT.nextPage());
+    assertEquals(null, QUERY_RESULT_INCOMPLETE.schema());
+    assertEquals(0L, QUERY_RESULT_INCOMPLETE.totalRows());
+    assertEquals(TOTAL_BYTES_PROCESSED, QUERY_RESULT_INCOMPLETE.totalBytesProcessed());
+    assertEquals(false, QUERY_RESULT_INCOMPLETE.cacheHit());
+    assertEquals(null, QUERY_RESULT_INCOMPLETE.nextPageCursor());
+    assertEquals(null, QUERY_RESULT_INCOMPLETE.nextPage());
+  }
+
+  @Test
+  public void testEquals() {
+    compareQueryResult(QUERY_RESULT, QUERY_RESULT);
+    compareQueryResult(QUERY_RESULT_INCOMPLETE, QUERY_RESULT_INCOMPLETE);
+  }
+
+  private void compareQueryResult(QueryResult expected, QueryResult value) {
+    assertEquals(expected, value);
+    assertEquals(expected.nextPage(), value.nextPage());
+    assertEquals(expected.nextPageCursor(), value.nextPageCursor());
+    assertEquals(expected.values(), value.values());
+    assertEquals(expected.schema(), value.schema());
+    assertEquals(expected.totalRows(), value.totalRows());
+    assertEquals(expected.totalBytesProcessed(), value.totalBytesProcessed());
+    assertEquals(expected.cacheHit(), value.cacheHit());
+  }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java
new file mode 100644
index 000000000000..99a7c8096454
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryStageTest.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.api.services.bigquery.model.ExplainQueryStep;
+import com.google.common.collect.ImmutableList;
+import com.google.gcloud.bigquery.QueryStage.QueryStep;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class QueryStageTest {
+
+  private static final List<String> SUBSTEPS1 = ImmutableList.of("substep1", "substep2");
+  private static final List<String> SUBSTEPS2 = ImmutableList.of("substep3", "substep4");
+  private static final QueryStep QUERY_STEP1 = new QueryStep("KIND", SUBSTEPS1);
+  private static final QueryStep QUERY_STEP2 = new QueryStep("KIND", SUBSTEPS2);
+  private static final double COMPUTE_RATIO_AVG = 1.1;
+  private static final double COMPUTE_RATIO_MAX = 2.2;
+  private static final long ID = 42L;
+  private static final String NAME = "StageName";
+  private static final double READ_RATIO_AVG = 3.3;
+  private static final double READ_RATIO_MAX = 4.4;
+  private static final long RECORDS_READ = 5L;
+  private static final long RECORDS_WRITTEN = 6L;
+  private static final List<QueryStep> STEPS = ImmutableList.of(QUERY_STEP1, QUERY_STEP2);
+  private static final double WAIT_RATIO_AVG = 7.7;
+  private static final double WAIT_RATIO_MAX = 8.8;
+  private static final double WRITE_RATIO_AVG = 9.9;
+  private static final double WRITE_RATIO_MAX = 10.10;
+  private static final QueryStage QUERY_STAGE = QueryStage.builder()
+      .computeRatioAvg(COMPUTE_RATIO_AVG)
+      .computeRatioMax(COMPUTE_RATIO_MAX)
+      .id(ID)
+      .name(NAME)
+      .readRatioAvg(READ_RATIO_AVG)
+      .readRatioMax(READ_RATIO_MAX)
+      .recordsRead(RECORDS_READ)
+      .recordsWritten(RECORDS_WRITTEN)
+      .steps(STEPS)
+      .waitRatioAvg(WAIT_RATIO_AVG)
+      .waitRatioMax(WAIT_RATIO_MAX)
+      .writeRatioAvg(WRITE_RATIO_AVG)
+      .writeRatioMax(WRITE_RATIO_MAX)
+      .build();
+
+  @Test
+  public void testQueryStepConstructor() {
+    assertEquals("KIND", QUERY_STEP1.name());
+    assertEquals("KIND", QUERY_STEP2.name());
+    assertEquals(SUBSTEPS1, QUERY_STEP1.substeps());
+    assertEquals(SUBSTEPS2, QUERY_STEP2.substeps());
+  }
+
+  @Test
+  public void testBuilder() {
+    assertEquals(COMPUTE_RATIO_AVG, QUERY_STAGE.computeRatioAvg(), 0);
+    assertEquals(COMPUTE_RATIO_MAX, QUERY_STAGE.computeRatioMax(), 0);
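
Taken together, the query types exercised above (`QueryRequest`, `QueryResponse`, `QueryResult`) support a submit-then-poll pattern. A minimal sketch, assuming `bigquery.query(...)` and `bigquery.getQueryResults(...)` as the entry points and a placeholder SQL string; the wait and paging values are illustrative:

```java
// Sketch only: runs a query, polls until the job completes, then iterates rows.
// Assumes java.util.Iterator plus the FieldValue type from this package.
static void runQuery(BigQuery bigquery) throws InterruptedException {
  QueryRequest request = QueryRequest.builder("SELECT field FROM myDataset.myTable")
      .maxWaitTime(60000L)
      .maxResults(1000L)
      .build();
  QueryResponse response = bigquery.query(request);
  while (!response.jobCompleted()) {
    Thread.sleep(1000);
    response = bigquery.getQueryResults(response.jobId());
  }
  Iterator<List<FieldValue>> rowIterator = response.result().iterateAll();
  while (rowIterator.hasNext()) {
    System.out.println(rowIterator.next());
  }
}
```
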
assertEquals(ID, QUERY_STAGE.id()); + assertEquals(NAME, QUERY_STAGE.name()); + assertEquals(READ_RATIO_AVG, QUERY_STAGE.readRatioAvg(), 0); + assertEquals(READ_RATIO_MAX, QUERY_STAGE.readRatioMax(), 0); + assertEquals(RECORDS_READ, QUERY_STAGE.recordsRead()); + assertEquals(RECORDS_WRITTEN, QUERY_STAGE.recordsWritten()); + assertEquals(STEPS, QUERY_STAGE.steps()); + assertEquals(WAIT_RATIO_AVG, QUERY_STAGE.waitRatioAvg(), 0); + assertEquals(WAIT_RATIO_MAX, QUERY_STAGE.waitRatioMax(), 0); + assertEquals(WRITE_RATIO_AVG, QUERY_STAGE.writeRatioAvg(), 0); + assertEquals(WRITE_RATIO_MAX, QUERY_STAGE.writeRatioMax(), 0); + } + + @Test + public void testToAndFromPb() { + compareQueryStep(QUERY_STEP1, QueryStep.fromPb(QUERY_STEP1.toPb())); + compareQueryStep(QUERY_STEP2, QueryStep.fromPb(QUERY_STEP2.toPb())); + compareQueryStage(QUERY_STAGE, QueryStage.fromPb(QUERY_STAGE.toPb())); + ExplainQueryStep stepPb = new ExplainQueryStep(); + stepPb.setKind("KIND"); + stepPb.setSubsteps(null); + compareQueryStep(new QueryStep("KIND", ImmutableList.<String>of()), QueryStep.fromPb(stepPb)); + } + + @Test + public void testEquals() { + compareQueryStep(QUERY_STEP1, QUERY_STEP1); + compareQueryStep(QUERY_STEP2, QUERY_STEP2); + compareQueryStage(QUERY_STAGE, QUERY_STAGE); + } + + private void compareQueryStage(QueryStage expected, QueryStage value) { + assertEquals(expected, value); + assertEquals(expected.computeRatioAvg(), value.computeRatioAvg(), 0); + assertEquals(expected.computeRatioMax(), value.computeRatioMax(), 0); + assertEquals(expected.id(), value.id()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.readRatioAvg(), value.readRatioAvg(), 0); + assertEquals(expected.readRatioMax(), value.readRatioMax(), 0); + assertEquals(expected.recordsRead(), value.recordsRead()); + assertEquals(expected.recordsWritten(), value.recordsWritten()); + assertEquals(expected.steps(), value.steps()); + assertEquals(expected.waitRatioAvg(), value.waitRatioAvg(), 0); + assertEquals(expected.waitRatioMax(), value.waitRatioMax(), 0); + assertEquals(expected.writeRatioAvg(), value.writeRatioAvg(), 0); + assertEquals(expected.writeRatioMax(), value.writeRatioMax(), 0); + assertEquals(expected.hashCode(), value.hashCode()); + } + + private void compareQueryStep(QueryStep expected, QueryStep value) { + assertEquals(expected, value); + assertEquals(expected.name(), value.name()); + assertEquals(expected.substeps(), value.substeps()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java new file mode 100644 index 000000000000..62a88c1860cd --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.bigquery.BigQuery.DatasetDeleteOption; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; + +import org.easymock.EasyMock; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.concurrent.ExecutionException; + +public class RemoteBigQueryHelperTest { + + private static final String DATASET_NAME = "dataset-name"; + private static final String PROJECT_ID = "project-id"; + private static final String JSON_KEY = "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\"\n" + + "}"; + private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + @Test + public void testForceDelete() throws InterruptedException, ExecutionException { + BigQuery bigqueryMock = EasyMock.createMock(BigQuery.class); + EasyMock.expect(bigqueryMock.delete(DATASET_NAME, DatasetDeleteOption.deleteContents())) + .andReturn(true); + EasyMock.replay(bigqueryMock); + assertTrue(RemoteBigQueryHelper.forceDelete(bigqueryMock, DATASET_NAME)); + EasyMock.verify(bigqueryMock); + } + + @Test + public void testCreateFromStream() { + RemoteBigQueryHelper helper = 
RemoteBigQueryHelper.create(PROJECT_ID, JSON_KEY_STREAM); + BigQueryOptions options = helper.options(); + assertEquals(PROJECT_ID, options.projectId()); + assertEquals(60000, options.connectTimeout()); + assertEquals(60000, options.readTimeout()); + assertEquals(10, options.retryParams().retryMaxAttempts()); + assertEquals(6, options.retryParams().retryMinAttempts()); + assertEquals(30000, options.retryParams().maxRetryDelayMillis()); + assertEquals(120000, options.retryParams().totalRetryPeriodMillis()); + assertEquals(250, options.retryParams().initialRetryDelayMillis()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java new file mode 100644 index 000000000000..d24268d2e7cd --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java @@ -0,0 +1,77 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class SchemaTest { + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final List<Field> FIELDS = ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2, + FIELD_SCHEMA3); + private static final Schema TABLE_SCHEMA = Schema.builder().fields(FIELDS).build(); + + @Test + public void testToBuilder() { + compareTableSchema(TABLE_SCHEMA, TABLE_SCHEMA.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FIELDS, TABLE_SCHEMA.fields()); + Schema schema = TABLE_SCHEMA.toBuilder() + .fields(FIELD_SCHEMA1, FIELD_SCHEMA2) + .addField(FIELD_SCHEMA3) + .build(); + compareTableSchema(TABLE_SCHEMA, schema); + } + + @Test + public void testOf() { + compareTableSchema(TABLE_SCHEMA, Schema.of(FIELDS)); + } + + @Test + public void testToAndFromPb() { + compareTableSchema(TABLE_SCHEMA, Schema.fromPb(TABLE_SCHEMA.toPb())); + } + + private void compareTableSchema(Schema expected, Schema value) { + assertEquals(expected, value); + assertEquals(expected.fields(), value.fields()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java new file mode 100644 index 000000000000..19b281f073b3 --- /dev/null +++
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java @@ -0,0 +1,295 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.bigquery.TableInfo.StreamingBuffer; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; + +public class SerializationTest { + + private static final Acl DOMAIN_ACCESS = + Acl.of(new Acl.Domain("domain"), Acl.Role.WRITER); + private static final Acl GROUP_ACCESS = + Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER); + private static final Acl USER_ACCESS = Acl.of(new Acl.User("user"), Acl.Role.OWNER); + private static final Acl VIEW_ACCESS = + Acl.of(new Acl.View(TableId.of("project", "dataset", "table")), Acl.Role.WRITER); + private static final List<Acl> ACCESS_RULES = ImmutableList.of(DOMAIN_ACCESS, GROUP_ACCESS, + VIEW_ACCESS, USER_ACCESS); + private static final Long CREATION_TIME = System.currentTimeMillis() - 10; + private static final Long DEFAULT_TABLE_EXPIRATION = 100L; + private static final String DESCRIPTION = "Description"; + private static final String ETAG = "0xFF00"; + private static final String FRIENDLY_NAME = "friendlyDataset"; + private static final String ID = "P/D:1"; + private static final Long LAST_MODIFIED = CREATION_TIME + 50; + private static final String LOCATION = ""; + private static final String SELF_LINK = "http://bigquery/p/d"; + private static final DatasetId DATASET_ID = DatasetId.of("project", "dataset"); + private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID) + .acl(ACCESS_RULES) + .creationTime(CREATION_TIME) + .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION) + .description(DESCRIPTION) + .etag(ETAG) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModified(LAST_MODIFIED) + .location(LOCATION) + .selfLink(SELF_LINK) + .build(); + private static final TableId TABLE_ID = TableId.of("project", "dataset", "table"); + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.ISO_8859_1) + .fieldDelimiter(",") + .quote("\"") + .skipLeadingRows(42) + .build(); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); +
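+ // The constants below serve as serialization fixtures: testModelAndRequests
+ // round-trips them (directly or embedded in larger objects) through Java
+ // serialization and checks that the deserialized copy is equal to, but not
+ // the same instance as, the original.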
private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final StreamingBuffer STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); + private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final ExternalDataConfiguration EXTERNAL_DATA_CONFIGURATION = + ExternalDataConfiguration.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .ignoreUnknownValues(true) + .maxBadRecords(42) + .build(); + private static final UserDefinedFunction INLINE_FUNCTION = + new UserDefinedFunction.InlineFunction("inline"); + private static final UserDefinedFunction URI_FUNCTION = + new UserDefinedFunction.UriFunction("URI"); + private static final BaseTableInfo TABLE_INFO = + TableInfo.builder(TABLE_ID, TABLE_SCHEMA) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .location(LOCATION) + .streamingBuffer(STREAMING_BUFFER) + .build(); + private static final ViewInfo VIEW_INFO = + ViewInfo.builder(TABLE_ID, "QUERY") + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final ExternalTableInfo EXTERNAL_TABLE_INFO = + ExternalTableInfo.builder(TABLE_ID, EXTERNAL_DATA_CONFIGURATION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final JobStatistics JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final JobStatistics.ExtractStatistics EXTRACT_STATISTICS = + JobStatistics.ExtractStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .destinationUriFileCounts(ImmutableList.of(42L)) + .build(); + private static final JobStatistics.LoadStatistics LOAD_STATISTICS = + JobStatistics.LoadStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .inputFiles(42L) + .outputBytes(1024L) + .inputBytes(2048L) + .outputRows(24L) + .build(); + private static final JobStatistics.QueryStatistics QUERY_STATISTICS = + JobStatistics.QueryStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .totalBytesProcessed(2048L) + .totalBytesBilled(1024L) + .cacheHit(false) + .billingTier(42) + .build(); + private static final BigQueryError BIGQUERY_ERROR = + new BigQueryError("reason", "location", "message", "debugInfo"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE, BIGQUERY_ERROR, + ImmutableList.of(BIGQUERY_ERROR)); + private static final JobId JOB_ID = JobId.of("project", "job"); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID, TABLE_ID); + private static final ExtractJobConfiguration EXTRACT_JOB_CONFIGURATION = + ExtractJobConfiguration.of(TABLE_ID, SOURCE_URIS); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.builder(TABLE_ID) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(true) + .maxBadRecords(10) + .schema(TABLE_SCHEMA) +
.build(); + private static final LoadJobConfiguration LOAD_JOB_CONFIGURATION = + LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = + QueryJobConfiguration.of("query"); + private static final JobInfo JOB_INFO = JobInfo.of(COPY_JOB_CONFIGURATION); + private static final Map<String, Object> CONTENT1 = + ImmutableMap.<String, Object>of("key", "val1"); + private static final Map<String, Object> CONTENT2 = + ImmutableMap.<String, Object>of("key", "val2"); + private static final InsertAllRequest INSERT_ALL_REQUEST = InsertAllRequest.builder(TABLE_ID) + .addRow(CONTENT1) + .addRow(CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .build(); + private static final Map<Long, List<BigQueryError>> ERRORS_MAP = + ImmutableMap.<Long, List<BigQueryError>>of(0L, ImmutableList.of(BIGQUERY_ERROR)); + private static final InsertAllResponse INSERT_ALL_RESPONSE = new InsertAllResponse(ERRORS_MAP); + private static final FieldValue FIELD_VALUE = + new FieldValue(FieldValue.Attribute.PRIMITIVE, "value"); + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("query") + .useQueryCache(true) + .defaultDataset(DATASET_ID) + .dryRun(false) + .maxResults(42L) + .maxWaitTime(10L) + .build(); + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(TABLE_SCHEMA) + .totalRows(1L) + .totalBytesProcessed(42L) + .cursor("cursor") + .pageFetcher(null) + .results(ImmutableList.<List<FieldValue>>of()) + .build(); + private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder() + .etag(ETAG) + .jobId(JOB_ID) + .jobCompleted(true) + .result(QUERY_RESULT) + .build(); + + @Test + public void testServiceOptions() throws Exception { + BigQueryOptions options = BigQueryOptions.builder() + .projectId("p1") + .authCredentials(AuthCredentials.createForAppEngine()) + .build(); + BigQueryOptions serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + + options = options.toBuilder() + .projectId("p2") + .retryParams(RetryParams.defaultInstance()) + .authCredentials(null) + .build(); + serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + } + + @Test + public void testModelAndRequests() throws Exception { + Serializable[] objects = {DOMAIN_ACCESS, GROUP_ACCESS, USER_ACCESS, VIEW_ACCESS, DATASET_ID, + DATASET_INFO, TABLE_ID, CSV_OPTIONS, STREAMING_BUFFER, EXTERNAL_DATA_CONFIGURATION, + TABLE_SCHEMA, TABLE_INFO, VIEW_INFO, EXTERNAL_TABLE_INFO, INLINE_FUNCTION, URI_FUNCTION, + JOB_STATISTICS, EXTRACT_STATISTICS, LOAD_STATISTICS, QUERY_STATISTICS, BIGQUERY_ERROR, + JOB_STATUS, JOB_ID, COPY_JOB_CONFIGURATION, EXTRACT_JOB_CONFIGURATION, LOAD_CONFIGURATION, + LOAD_JOB_CONFIGURATION, QUERY_JOB_CONFIGURATION, JOB_INFO, INSERT_ALL_REQUEST, + INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, + BigQuery.DatasetOption.fields(), BigQuery.DatasetDeleteOption.deleteContents(), + BigQuery.DatasetListOption.all(), BigQuery.TableOption.fields(), + BigQuery.TableListOption.maxResults(42L), BigQuery.JobOption.fields(), + BigQuery.JobListOption.allUsers()}; + for (Serializable obj : objects) { + Object copy = serializeAndDeserialize(obj); + assertEquals(obj, obj); + assertEquals(obj, copy); + assertNotSame(obj, copy); + assertEquals(copy, copy); + } + } + + @Test + public void testWriteChannelState() throws IOException, ClassNotFoundException { + BigQueryOptions options = BigQueryOptions.builder() + .projectId("p2") + .retryParams(RetryParams.defaultInstance()) + .build(); + // avoid closing when you don't want partial writes upon failure +
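+ // capture() below snapshots the open channel as a serializable
+ // RestorableState; a snapshot can later be restored, e.g.:
+ //   WriteChannel channel = state.restore();
+ //   channel.write(ByteBuffer.wrap(bytes));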
@SuppressWarnings("resource") + TableDataWriteChannel writer = + new TableDataWriteChannel(options, LOAD_CONFIGURATION, "upload-id"); + RestorableState state = writer.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); + assertEquals(state, deserializedState); + assertEquals(state.hashCode(), deserializedState.hashCode()); + assertEquals(state.toString(), deserializedState.toString()); + } + + @SuppressWarnings("unchecked") + private T serializeAndDeserialize(T obj) + throws IOException, ClassNotFoundException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { + output.writeObject(obj); + } + try (ObjectInputStream input = + new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { + return (T) input.readObject(); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java new file mode 100644 index 000000000000..6b7edcd76db1 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableDataWriteChannelTest.java @@ -0,0 +1,249 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.captureLong; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.eq; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.expectLastCall; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.gcloud.RestorableState; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.spi.BigQueryRpcFactory; + +import org.easymock.Capture; +import org.easymock.CaptureType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Random; + +public class TableDataWriteChannelTest { + + private static final String UPLOAD_ID = "uploadid"; + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = + WriteChannelConfiguration.builder(TABLE_ID) + .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND) + .formatOptions(FormatOptions.json()) + .ignoreUnknownValues(true) + .maxBadRecords(10) + .build(); + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private static final int CUSTOM_CHUNK_SIZE = 4 * MIN_CHUNK_SIZE; + private static final Random RANDOM = new Random(); + + private BigQueryOptions options; + private BigQueryRpcFactory rpcFactoryMock; + private BigQueryRpc bigqueryRpcMock; + private TableDataWriteChannel writer; + + @Before + public void setUp() { + rpcFactoryMock = createMock(BigQueryRpcFactory.class); + bigqueryRpcMock = createMock(BigQueryRpc.class); + expect(rpcFactoryMock.create(anyObject(BigQueryOptions.class))) + .andReturn(bigqueryRpcMock); + replay(rpcFactoryMock); + options = BigQueryOptions.builder() + .projectId("projectid") + .serviceRpcFactory(rpcFactoryMock) + .build(); + } + + @After + public void tearDown() throws Exception { + verify(rpcFactoryMock, bigqueryRpcMock); + } + + @Test + public void testCreate() { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + } + + @Test + public void testWriteWithoutFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); + } + + @Test + public void testWriteWithFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(CUSTOM_CHUNK_SIZE), eq(false)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.chunkSize(CUSTOM_CHUNK_SIZE); + ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE);
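+ // The buffer exactly fills the configured chunk size, so this write() should
+ // trigger a single flush whose payload is captured and verified below.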
assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); + assertArrayEquals(buffer.array(), capturedBuffer.getValue()); + } + + @Test + public void testWritesAndFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(DEFAULT_CHUNK_SIZE), eq(false)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + for (int i = 0; i < buffers.length; i++) { + assertArrayEquals( + buffers[i].array(), + Arrays.copyOfRange( + capturedBuffer.getValue(), MIN_CHUNK_SIZE * i, MIN_CHUNK_SIZE * (i + 1))); + } + } + + @Test + public void testCloseWithoutFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + writer.close(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertTrue(!writer.isOpen()); + } + + @Test + public void testCloseWithFlush() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(); + ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), + eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertTrue(writer.isOpen()); + writer.write(buffer); + writer.close(); + assertEquals(DEFAULT_CHUNK_SIZE, capturedBuffer.getValue().length); + assertArrayEquals(buffer.array(), Arrays.copyOf(capturedBuffer.getValue(), MIN_CHUNK_SIZE)); + assertTrue(!writer.isOpen()); + } + + @Test + public void testWriteClosed() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.close(); + try { + writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); + fail("Expected TableDataWriteChannel write to throw IOException"); + } catch (IOException ex) { + // expected + } + } + + @Test + public void testSaveAndRestore() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(CaptureType.ALL); + Capture<Long> capturedPosition = Capture.newInstance(CaptureType.ALL); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), + captureLong(capturedPosition), eq(DEFAULT_CHUNK_SIZE), eq(false)); + expectLastCall().times(2); + replay(bigqueryRpcMock); + ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); + ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); +
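+ // The first flush lands at upload position 0; after capture()/restore() the
+ // second write should resume at DEFAULT_CHUNK_SIZE, which the captured
+ // positions below confirm.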
assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); + assertEquals(new Long(0L), capturedPosition.getValues().get(0)); + RestorableState<WriteChannel> writerState = writer.capture(); + WriteChannel restoredWriter = writerState.restore(); + assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2)); + assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1)); + assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1)); + } + + @Test + public void testSaveAndRestoreClosed() throws IOException { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID); + Capture<byte[]> capturedBuffer = Capture.newInstance(); + bigqueryRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + writer.close(); + RestorableState<WriteChannel> writerState = writer.capture(); + RestorableState<WriteChannel> expectedWriterState = + TableDataWriteChannel.StateImpl.builder(options, LOAD_CONFIGURATION, UPLOAD_ID) + .buffer(null) + .chunkSize(DEFAULT_CHUNK_SIZE) + .isOpen(false) + .position(0) + .build(); + WriteChannel restoredWriter = writerState.restore(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertEquals(expectedWriterState, restoredWriter.capture()); + } + + @Test + public void testStateEquals() { + expect(bigqueryRpcMock.open(LOAD_CONFIGURATION.toPb())).andReturn(UPLOAD_ID).times(2); + replay(bigqueryRpcMock); + writer = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + // avoid closing when you don't want partial writes upon failure + @SuppressWarnings("resource") + WriteChannel writer2 = new TableDataWriteChannel(options, LOAD_CONFIGURATION); + RestorableState<WriteChannel> state = writer.capture(); + RestorableState<WriteChannel> state2 = writer2.capture(); + assertEquals(state, state2); + assertEquals(state.hashCode(), state2.hashCode()); + assertEquals(state.toString(), state2.toString()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java new file mode 100644 index 000000000000..bc013bfa5c31 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java @@ -0,0 +1,61 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class TableIdTest { + + private static final TableId TABLE = TableId.of("dataset", "table"); + private static final TableId TABLE_COMPLETE = TableId.of("project", "dataset", "table"); + + @Test + public void testOf() { + assertEquals(null, TABLE.project()); + assertEquals("dataset", TABLE.dataset()); + assertEquals("table", TABLE.table()); + assertEquals("project", TABLE_COMPLETE.project()); + assertEquals("dataset", TABLE_COMPLETE.dataset()); + assertEquals("table", TABLE_COMPLETE.table()); + } + + @Test + public void testEquals() { + compareTableIds(TABLE, TableId.of("dataset", "table")); + compareTableIds(TABLE_COMPLETE, TableId.of("project", "dataset", "table")); + } + + @Test + public void testToPbAndFromPb() { + compareTableIds(TABLE, TableId.fromPb(TABLE.toPb())); + compareTableIds(TABLE_COMPLETE, TableId.fromPb(TABLE_COMPLETE.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals(TABLE_COMPLETE, TABLE.setProjectId("project")); + } + + private void compareTableIds(TableId expected, TableId value) { + assertEquals(expected, value); + assertEquals(expected.project(), value.project()); + assertEquals(expected.dataset(), value.dataset()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java new file mode 100644 index 000000000000..7326f6c51b95 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java @@ -0,0 +1,243 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.TableInfo.StreamingBuffer; + +import org.junit.Test; + +import java.util.List; + +public class TableInfoTest { + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final String VIEW_QUERY = "VIEW QUERY"; + private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final String COMPRESSION = "GZIP"; + private static final ExternalDataConfiguration CONFIGURATION = ExternalDataConfiguration + .builder(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.datastoreBackup()) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + private static final String ETAG = "etag"; + private static final String ID = "project:dataset:table"; + private static final String SELF_LINK = "selfLink"; + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final String FRIENDLY_NAME = "friendlyName"; + private static final String DESCRIPTION = "description"; + private static final Long NUM_BYTES = 42L; + private static final Long NUM_ROWS = 43L; + private static final Long CREATION_TIME = 10L; + private static final Long EXPIRATION_TIME = 100L; + private static final Long LAST_MODIFIED_TIME = 20L; + private static final String LOCATION = "US"; + private static final StreamingBuffer STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); + private static final TableInfo TABLE_INFO = + TableInfo.builder(TABLE_ID, TABLE_SCHEMA) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .location(LOCATION) + .numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .selfLink(SELF_LINK) + .streamingBuffer(STREAMING_BUFFER) + .build(); + private static final ExternalTableInfo EXTERNAL_TABLE_INFO = + ExternalTableInfo.builder(TABLE_ID, CONFIGURATION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .selfLink(SELF_LINK) + .build(); + private static final List<UserDefinedFunction> USER_DEFINED_FUNCTIONS = + ImmutableList.of(UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final ViewInfo VIEW_INFO = + ViewInfo.builder(TABLE_ID, VIEW_QUERY, USER_DEFINED_FUNCTIONS) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) +
.numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .selfLink(SELF_LINK) + .build(); + + @Test + public void testToBuilder() { + compareTableInfo(TABLE_INFO, TABLE_INFO.toBuilder().build()); + compareViewInfo(VIEW_INFO, VIEW_INFO.toBuilder().build()); + compareExternalTableInfo(EXTERNAL_TABLE_INFO, EXTERNAL_TABLE_INFO.toBuilder().build()); + BaseTableInfo tableInfo = TABLE_INFO.toBuilder() + .description("newDescription") + .build(); + assertEquals("newDescription", tableInfo.description()); + tableInfo = tableInfo.toBuilder() + .description("description") + .build(); + compareBaseTableInfo(TABLE_INFO, tableInfo); + } + + @Test + public void testToBuilderIncomplete() { + BaseTableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_SCHEMA); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + tableInfo = ViewInfo.of(TABLE_ID, VIEW_QUERY); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + tableInfo = ExternalTableInfo.of(TABLE_ID, CONFIGURATION); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, TABLE_INFO.tableId()); + assertEquals(TABLE_SCHEMA, TABLE_INFO.schema()); + assertEquals(CREATION_TIME, TABLE_INFO.creationTime()); + assertEquals(DESCRIPTION, TABLE_INFO.description()); + assertEquals(ETAG, TABLE_INFO.etag()); + assertEquals(EXPIRATION_TIME, TABLE_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, TABLE_INFO.friendlyName()); + assertEquals(ID, TABLE_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, TABLE_INFO.lastModifiedTime()); + assertEquals(LOCATION, TABLE_INFO.location()); + assertEquals(NUM_BYTES, TABLE_INFO.numBytes()); + assertEquals(NUM_ROWS, TABLE_INFO.numRows()); + assertEquals(SELF_LINK, TABLE_INFO.selfLink()); + assertEquals(STREAMING_BUFFER, TABLE_INFO.streamingBuffer()); + assertEquals(BaseTableInfo.Type.TABLE, TABLE_INFO.type()); + assertEquals(TABLE_ID, VIEW_INFO.tableId()); + assertEquals(null, VIEW_INFO.schema()); + assertEquals(VIEW_QUERY, VIEW_INFO.query()); + assertEquals(BaseTableInfo.Type.VIEW, VIEW_INFO.type()); + assertEquals(CREATION_TIME, VIEW_INFO.creationTime()); + assertEquals(DESCRIPTION, VIEW_INFO.description()); + assertEquals(ETAG, VIEW_INFO.etag()); + assertEquals(EXPIRATION_TIME, VIEW_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, VIEW_INFO.friendlyName()); + assertEquals(ID, VIEW_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, VIEW_INFO.lastModifiedTime()); + assertEquals(NUM_BYTES, VIEW_INFO.numBytes()); + assertEquals(NUM_ROWS, VIEW_INFO.numRows()); + assertEquals(SELF_LINK, VIEW_INFO.selfLink()); + assertEquals(BaseTableInfo.Type.VIEW, VIEW_INFO.type()); + assertEquals(TABLE_ID, EXTERNAL_TABLE_INFO.tableId()); + assertEquals(null, EXTERNAL_TABLE_INFO.schema()); + assertEquals(CONFIGURATION, EXTERNAL_TABLE_INFO.configuration()); + assertEquals(CREATION_TIME, EXTERNAL_TABLE_INFO.creationTime()); + assertEquals(DESCRIPTION, EXTERNAL_TABLE_INFO.description()); + assertEquals(ETAG, EXTERNAL_TABLE_INFO.etag()); + assertEquals(EXPIRATION_TIME, EXTERNAL_TABLE_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, EXTERNAL_TABLE_INFO.friendlyName()); + assertEquals(ID, EXTERNAL_TABLE_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, EXTERNAL_TABLE_INFO.lastModifiedTime()); + assertEquals(NUM_BYTES, EXTERNAL_TABLE_INFO.numBytes()); + assertEquals(NUM_ROWS, EXTERNAL_TABLE_INFO.numRows()); + assertEquals(SELF_LINK, EXTERNAL_TABLE_INFO.selfLink()); + assertEquals(BaseTableInfo.Type.EXTERNAL, EXTERNAL_TABLE_INFO.type()); + } + + @Test + public void testToAndFromPb() { + 
assertTrue(BaseTableInfo.fromPb(TABLE_INFO.toPb()) instanceof TableInfo); + compareTableInfo(TABLE_INFO, BaseTableInfo.fromPb(TABLE_INFO.toPb())); + assertTrue(BaseTableInfo.fromPb(VIEW_INFO.toPb()) instanceof ViewInfo); + compareViewInfo(VIEW_INFO, BaseTableInfo.fromPb(VIEW_INFO.toPb())); + assertTrue(BaseTableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb()) instanceof ExternalTableInfo); + compareExternalTableInfo(EXTERNAL_TABLE_INFO, + BaseTableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb())); + } + + @Test + public void testSetProjectId() { + assertEquals("project", TABLE_INFO.setProjectId("project").tableId().project()); + assertEquals("project", EXTERNAL_TABLE_INFO.setProjectId("project").tableId().project()); + assertEquals("project", VIEW_INFO.setProjectId("project").tableId().project()); + } + + private void compareBaseTableInfo(BaseTableInfo expected, BaseTableInfo value) { + assertEquals(expected, value); + assertEquals(expected.tableId(), value.tableId()); + assertEquals(expected.schema(), value.schema()); + assertEquals(expected.type(), value.type()); + assertEquals(expected.creationTime(), value.creationTime()); + assertEquals(expected.description(), value.description()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.expirationTime(), value.expirationTime()); + assertEquals(expected.friendlyName(), value.friendlyName()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.lastModifiedTime(), value.lastModifiedTime()); + assertEquals(expected.numBytes(), value.numBytes()); + assertEquals(expected.numRows(), value.numRows()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.type(), value.type()); + } + + private void compareTableInfo(TableInfo expected, TableInfo value) { + compareBaseTableInfo(expected, value); + assertEquals(expected, value); + assertEquals(expected.location(), value.location()); + assertEquals(expected.streamingBuffer(), value.streamingBuffer()); + } + + private void compareViewInfo(ViewInfo expected, ViewInfo value) { + compareBaseTableInfo(expected, value); + assertEquals(expected, value); + assertEquals(expected.query(), value.query()); + assertEquals(expected.userDefinedFunctions(), value.userDefinedFunctions()); + } + + private void compareExternalTableInfo(ExternalTableInfo expected, ExternalTableInfo value) { + compareBaseTableInfo(expected, value); + assertEquals(expected, value); + assertEquals(expected.configuration(), value.configuration()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java new file mode 100644 index 000000000000..2d0b7e528750 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableTest.java @@ -0,0 +1,344 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterators; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; + +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.Iterator; +import java.util.List; + +public class TableTest { + + private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); + private static final TableId TABLE_ID2 = TableId.of("dataset", "table2"); + private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = + CopyJobConfiguration.of(TABLE_ID2, TABLE_ID1); + private static final JobInfo COPY_JOB_INFO = JobInfo.of(COPY_JOB_CONFIGURATION); + private static final JobInfo LOAD_JOB_INFO = + JobInfo.of(LoadJobConfiguration.of(TABLE_ID1, ImmutableList.of("URI"), FormatOptions.json())); + private static final JobInfo EXTRACT_JOB_INFO = + JobInfo.of(ExtractJobConfiguration.of(TABLE_ID1, ImmutableList.of("URI"), "CSV")); + private static final Field FIELD = Field.of("FieldName", Field.Type.integer()); + private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID1, Schema.of(FIELD)); + private static final List<RowToInsert> ROWS_TO_INSERT = ImmutableList.of( + RowToInsert.of("id1", ImmutableMap.<String, Object>of("key", "val1")), + RowToInsert.of("id2", ImmutableMap.<String, Object>of("key", "val2"))); + private static final InsertAllRequest INSERT_ALL_REQUEST = + InsertAllRequest.of(TABLE_ID1, ROWS_TO_INSERT); + private static final InsertAllRequest INSERT_ALL_REQUEST_COMPLETE = + InsertAllRequest.builder(TABLE_ID1, ROWS_TO_INSERT) + .skipInvalidRows(true) + .ignoreUnknownValues(true) + .build(); + private static final InsertAllResponse EMPTY_INSERT_ALL_RESPONSE = + new InsertAllResponse(ImmutableMap.<Long, List<BigQueryError>>of()); + private static final FieldValue FIELD_VALUE1 = + new FieldValue(FieldValue.Attribute.PRIMITIVE, "val1"); + private static final FieldValue FIELD_VALUE2 = + new FieldValue(FieldValue.Attribute.PRIMITIVE, "val1"); + private static final Iterable<List<FieldValue>> ROWS = ImmutableList.of( + (List<FieldValue>) ImmutableList.of(FIELD_VALUE1), ImmutableList.of(FIELD_VALUE2)); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + private BigQuery bigquery; + private Table table; + + @Before + public void setUp() throws Exception { + bigquery = createStrictMock(BigQuery.class); + table = new Table(bigquery, TABLE_INFO); + } + + @After + public void tearDown() throws Exception { + verify(bigquery); + } + + @Test + public void testInfo() throws Exception { + assertEquals(TABLE_INFO, table.info()); + replay(bigquery); + } + + @Test + public void testBigQuery() throws Exception { + assertSame(bigquery, table.bigquery()); + replay(bigquery); + } + + @Test + public void testExists_True() throws Exception { + BigQuery.TableOption[] expectedOptions = {BigQuery.TableOption.fields()}; + expect(bigquery.getTable(TABLE_INFO.tableId(),
expectedOptions)).andReturn(TABLE_INFO); + replay(bigquery); + assertTrue(table.exists()); + } + + @Test + public void testExists_False() throws Exception { + BigQuery.TableOption[] expectedOptions = {BigQuery.TableOption.fields()}; + expect(bigquery.getTable(TABLE_INFO.tableId(), expectedOptions)).andReturn(null); + replay(bigquery); + assertFalse(table.exists()); + } + + @Test + public void testReload() throws Exception { + TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(updatedInfo); + replay(bigquery); + Table updatedTable = table.reload(); + assertSame(bigquery, updatedTable.bigquery()); + assertEquals(updatedInfo, updatedTable.info()); + } + + @Test + public void testReloadNull() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null); + replay(bigquery); + assertNull(table.reload()); + } + + @Test + public void testReloadWithOptions() throws Exception { + TableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); + expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields())) + .andReturn(updatedInfo); + replay(bigquery); + Table updatedTable = table.reload(BigQuery.TableOption.fields()); + assertSame(bigquery, updatedTable.bigquery()); + assertEquals(updatedInfo, updatedTable.info()); + } + + @Test + public void testUpdate() throws Exception { + BaseTableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); + expect(bigquery.update(updatedInfo)).andReturn(updatedInfo); + replay(bigquery); + Table updatedTable = table.update(updatedInfo); + assertSame(bigquery, updatedTable.bigquery()); + assertEquals(updatedInfo, updatedTable.info()); + } + + @Test + public void testUpdateWithDifferentId() throws Exception { + TableInfo updatedInfo = TABLE_INFO.toBuilder() + .tableId(TableId.of("dataset", "table3")) + .description("Description") + .build(); + replay(bigquery); + thrown.expect(IllegalArgumentException.class); + table.update(updatedInfo); + } + + @Test + public void testUpdateWithDifferentDatasetId() throws Exception { + TableInfo updatedInfo = TABLE_INFO.toBuilder() + .tableId(TableId.of("dataset1", "table1")) + .description("Description") + .build(); + replay(bigquery); + thrown.expect(IllegalArgumentException.class); + table.update(updatedInfo); + } + + @Test + public void testUpdateWithOptions() throws Exception { + BaseTableInfo updatedInfo = TABLE_INFO.toBuilder().description("Description").build(); + expect(bigquery.update(updatedInfo, BigQuery.TableOption.fields())).andReturn(updatedInfo); + replay(bigquery); + Table updatedTable = table.update(updatedInfo, BigQuery.TableOption.fields()); + assertSame(bigquery, updatedTable.bigquery()); + assertEquals(updatedInfo, updatedTable.info()); + } + + @Test + public void testDelete() throws Exception { + expect(bigquery.delete(TABLE_INFO.tableId())).andReturn(true); + replay(bigquery); + assertTrue(table.delete()); + } + + @Test + public void testInsert() throws Exception { + expect(bigquery.insertAll(INSERT_ALL_REQUEST)).andReturn(EMPTY_INSERT_ALL_RESPONSE); + replay(bigquery); + InsertAllResponse response = table.insert(ROWS_TO_INSERT); + assertSame(EMPTY_INSERT_ALL_RESPONSE, response); + } + + @Test + public void testInsertComplete() throws Exception { + expect(bigquery.insertAll(INSERT_ALL_REQUEST_COMPLETE)).andReturn(EMPTY_INSERT_ALL_RESPONSE); + replay(bigquery); + InsertAllResponse response = table.insert(ROWS_TO_INSERT, true, true); + 
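+ // insert(rows, skipInvalidRows, ignoreUnknownValues) should expand to the
+ // INSERT_ALL_REQUEST_COMPLETE expected on the strict mock above.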
assertSame(EMPTY_INSERT_ALL_RESPONSE, response); + } + + @Test + public void testList() throws Exception { + PageImpl<List<FieldValue>> tableDataPage = new PageImpl<>(null, "c", ROWS); + expect(bigquery.listTableData(TABLE_ID1)).andReturn(tableDataPage); + replay(bigquery); + Page<List<FieldValue>> dataPage = table.list(); + Iterator<List<FieldValue>> tableDataIterator = tableDataPage.values().iterator(); + Iterator<List<FieldValue>> dataIterator = dataPage.values().iterator(); + assertTrue(Iterators.elementsEqual(tableDataIterator, dataIterator)); + } + + @Test + public void testListWithOptions() throws Exception { + PageImpl<List<FieldValue>> tableDataPage = new PageImpl<>(null, "c", ROWS); + expect(bigquery.listTableData(TABLE_ID1, BigQuery.TableDataListOption.maxResults(10L))) + .andReturn(tableDataPage); + replay(bigquery); + Page<List<FieldValue>> dataPage = table.list(BigQuery.TableDataListOption.maxResults(10L)); + Iterator<List<FieldValue>> tableDataIterator = tableDataPage.values().iterator(); + Iterator<List<FieldValue>> dataIterator = dataPage.values().iterator(); + assertTrue(Iterators.elementsEqual(tableDataIterator, dataIterator)); + } + + @Test + public void testCopyFromString() throws Exception { + expect(bigquery.create(COPY_JOB_INFO)).andReturn(COPY_JOB_INFO); + replay(bigquery); + Job job = table.copy(TABLE_ID2.dataset(), TABLE_ID2.table()); + assertSame(bigquery, job.bigquery()); + assertEquals(COPY_JOB_INFO, job.info()); + } + + @Test + public void testCopyFromId() throws Exception { + expect(bigquery.create(COPY_JOB_INFO)).andReturn(COPY_JOB_INFO); + replay(bigquery); + Job job = table.copy(TABLE_ID2); + assertSame(bigquery, job.bigquery()); + assertEquals(COPY_JOB_INFO, job.info()); + } + + @Test + public void testLoadDataUri() throws Exception { + expect(bigquery.create(LOAD_JOB_INFO)).andReturn(LOAD_JOB_INFO); + replay(bigquery); + Job job = table.load(FormatOptions.json(), "URI"); + assertSame(bigquery, job.bigquery()); + assertEquals(LOAD_JOB_INFO, job.info()); + } + + @Test + public void testLoadDataUris() throws Exception { + expect(bigquery.create(LOAD_JOB_INFO)).andReturn(LOAD_JOB_INFO); + replay(bigquery); + Job job = table.load(FormatOptions.json(), ImmutableList.of("URI")); + assertSame(bigquery, job.bigquery()); + assertEquals(LOAD_JOB_INFO, job.info()); + } + + @Test + public void testExtractDataUri() throws Exception { + expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(EXTRACT_JOB_INFO); + replay(bigquery); + Job job = table.extract("CSV", "URI"); + assertSame(bigquery, job.bigquery()); + assertEquals(EXTRACT_JOB_INFO, job.info()); + } + + @Test + public void testExtractDataUris() throws Exception { + expect(bigquery.create(EXTRACT_JOB_INFO)).andReturn(EXTRACT_JOB_INFO); + replay(bigquery); + Job job = table.extract("CSV", ImmutableList.of("URI")); + assertSame(bigquery, job.bigquery()); + assertEquals(EXTRACT_JOB_INFO, job.info()); + } + + @Test + public void testGetFromId() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(TABLE_INFO); + replay(bigquery); + Table loadedTable = Table.get(bigquery, TABLE_INFO.tableId()); + assertNotNull(loadedTable); + assertEquals(TABLE_INFO, loadedTable.info()); + } + + @Test + public void testGetFromStrings() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(TABLE_INFO); + replay(bigquery); + Table loadedTable = Table.get(bigquery, TABLE_ID1.dataset(), TABLE_ID1.table()); + assertNotNull(loadedTable); + assertEquals(TABLE_INFO, loadedTable.info()); + } + + @Test + public void testGetFromIdNull() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null); +
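+ // Table.get mirrors BigQuery.getTable: a null response from the service means
+ // the table does not exist, so the helper returns null instead of throwing.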
replay(bigquery); + assertNull(Table.get(bigquery, TABLE_INFO.tableId())); + } + + @Test + public void testGetFromStringsNull() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId())).andReturn(null); + replay(bigquery); + assertNull(Table.get(bigquery, TABLE_ID1.dataset(), TABLE_ID1.table())); + } + + @Test + public void testGetFromIdWithOptions() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields())) + .andReturn(TABLE_INFO); + replay(bigquery); + Table loadedTable = Table.get(bigquery, TABLE_INFO.tableId(), BigQuery.TableOption.fields()); + assertNotNull(loadedTable); + assertEquals(TABLE_INFO, loadedTable.info()); + } + + @Test + public void testGetFromStringsWithOptions() throws Exception { + expect(bigquery.getTable(TABLE_INFO.tableId(), BigQuery.TableOption.fields())) + .andReturn(TABLE_INFO); + replay(bigquery); + Table loadedTable = + Table.get(bigquery, TABLE_ID1.dataset(), TABLE_ID1.table(), BigQuery.TableOption.fields()); + assertNotNull(loadedTable); + assertEquals(TABLE_INFO, loadedTable.info()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java new file mode 100644 index 000000000000..2741aaed89a5 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java @@ -0,0 +1,56 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class UserDefinedFunctionTest { + + private static final String INLINE = "inline"; + private static final String URI = "uri"; + private static final UserDefinedFunction INLINE_FUNCTION = + new UserDefinedFunction.InlineFunction(INLINE); + private static final UserDefinedFunction URI_FUNCTION = new UserDefinedFunction.UriFunction(URI); + + @Test + public void testConstructor() { + assertEquals(INLINE, INLINE_FUNCTION.content()); + assertEquals(UserDefinedFunction.Type.INLINE, INLINE_FUNCTION.type()); + assertEquals(URI, URI_FUNCTION.content()); + assertEquals(UserDefinedFunction.Type.FROM_URI, URI_FUNCTION.type()); + } + + @Test + public void testFactoryMethod() { + compareUserDefinedFunction(INLINE_FUNCTION, UserDefinedFunction.inline(INLINE)); + compareUserDefinedFunction(URI_FUNCTION, UserDefinedFunction.fromUri(URI)); + } + + @Test + public void testToAndFromPb() { + compareUserDefinedFunction(INLINE_FUNCTION, UserDefinedFunction.fromPb(INLINE_FUNCTION.toPb())); + compareUserDefinedFunction(URI_FUNCTION, UserDefinedFunction.fromPb(URI_FUNCTION.toPb())); + } + + private void compareUserDefinedFunction(UserDefinedFunction expected, UserDefinedFunction value) { + assertEquals(expected, value); + assertEquals(expected.type(), value.type()); + assertEquals(expected.content(), value.content()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java new file mode 100644 index 000000000000..17fa8446d097 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/WriteChannelConfigurationTest.java @@ -0,0 +1,123 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
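For context on the file above: `UserDefinedFunction` exposes exactly two factory methods, matching the two `Type` values its tests assert. A minimal sketch (the JavaScript body and the bucket path are placeholders):

```java
// Per UserDefinedFunctionTest above: inline code or a Cloud Storage URI.
UserDefinedFunction inline =
    UserDefinedFunction.inline("function magic(row) { return row; }");
UserDefinedFunction fromUri = UserDefinedFunction.fromUri("gs://my-bucket/my-udfs.js");

inline.type();     // UserDefinedFunction.Type.INLINE
inline.content();  // the JavaScript source itself
fromUri.type();    // UserDefinedFunction.Type.FROM_URI
fromUri.content(); // "gs://my-bucket/my-udfs.js"
```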
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import org.junit.Test; + +import java.nio.charset.StandardCharsets; +import java.util.List; + +public class WriteChannelConfigurationTest { + + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.UTF_8) + .build(); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Integer MAX_BAD_RECORDS = 42; + private static final String FORMAT = "CSV"; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); + private static final Field FIELD_SCHEMA = Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA); + private static final WriteChannelConfiguration LOAD_CONFIGURATION = WriteChannelConfiguration.builder(TABLE_ID) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .formatOptions(CSV_OPTIONS) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .projectionFields(PROJECTION_FIELDS) + .schema(TABLE_SCHEMA) + .build(); + + @Test + public void testToBuilder() { + compareLoadConfiguration(LOAD_CONFIGURATION, LOAD_CONFIGURATION.toBuilder().build()); + WriteChannelConfiguration configuration = LOAD_CONFIGURATION.toBuilder() + .destinationTable(TableId.of("dataset", "newTable")) + .build(); + assertEquals("newTable", configuration.destinationTable().table()); + configuration = configuration.toBuilder().destinationTable(TABLE_ID).build(); + compareLoadConfiguration(LOAD_CONFIGURATION, configuration); + } + + @Test + public void testOf() { + WriteChannelConfiguration configuration = WriteChannelConfiguration.of(TABLE_ID); + assertEquals(TABLE_ID, configuration.destinationTable()); + configuration = WriteChannelConfiguration.of(TABLE_ID, CSV_OPTIONS); + assertEquals(TABLE_ID, configuration.destinationTable()); + assertEquals(FORMAT, configuration.format()); + assertEquals(CSV_OPTIONS, configuration.csvOptions()); + } + + @Test + public void testToBuilderIncomplete() { + WriteChannelConfiguration configuration = WriteChannelConfiguration.of(TABLE_ID); + compareLoadConfiguration(configuration, configuration.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, LOAD_CONFIGURATION.destinationTable()); + assertEquals(CREATE_DISPOSITION, LOAD_CONFIGURATION.createDisposition()); + assertEquals(WRITE_DISPOSITION, LOAD_CONFIGURATION.writeDisposition()); + assertEquals(CSV_OPTIONS, LOAD_CONFIGURATION.csvOptions()); + assertEquals(FORMAT, LOAD_CONFIGURATION.format()); + assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_CONFIGURATION.ignoreUnknownValues()); + assertEquals(MAX_BAD_RECORDS, LOAD_CONFIGURATION.maxBadRecords()); + assertEquals(PROJECTION_FIELDS, LOAD_CONFIGURATION.projectionFields()); + assertEquals(TABLE_SCHEMA, LOAD_CONFIGURATION.schema()); + } + + @Test + 
public void testToPbAndFromPb() {
+    assertNull(LOAD_CONFIGURATION.toPb().getLoad().getSourceUris());
+    compareLoadConfiguration(LOAD_CONFIGURATION,
+        WriteChannelConfiguration.fromPb(LOAD_CONFIGURATION.toPb()));
+    WriteChannelConfiguration configuration = WriteChannelConfiguration.of(TABLE_ID);
+    compareLoadConfiguration(configuration, WriteChannelConfiguration.fromPb(configuration.toPb()));
+  }
+
+  private void compareLoadConfiguration(WriteChannelConfiguration expected,
+      WriteChannelConfiguration value) {
+    assertEquals(expected, value);
+    assertEquals(expected.hashCode(), value.hashCode());
+    assertEquals(expected.toString(), value.toString());
+    assertEquals(expected.destinationTable(), value.destinationTable());
+    assertEquals(expected.createDisposition(), value.createDisposition());
+    assertEquals(expected.writeDisposition(), value.writeDisposition());
+    assertEquals(expected.csvOptions(), value.csvOptions());
+    assertEquals(expected.format(), value.format());
+    assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues());
+    assertEquals(expected.maxBadRecords(), value.maxBadRecords());
+    assertEquals(expected.projectionFields(), value.projectionFields());
+    assertEquals(expected.schema(), value.schema());
+  }
+}
diff --git a/gcloud-java-contrib/README.md b/gcloud-java-contrib/README.md
new file mode 100644
index 000000000000..23713f7450a3
--- /dev/null
+++ b/gcloud-java-contrib/README.md
@@ -0,0 +1,58 @@
+Google Cloud Java Contributions
+===============================
+
+Packages that provide higher-level abstraction/functionality for common gcloud-java use cases.
+
+Quickstart
+----------
+If you are using Maven, add this to your pom.xml file
+```xml
+<dependency>
+  <groupId>com.google.gcloud</groupId>
+  <artifactId>gcloud-java-contrib</artifactId>
+  <version>0.1.3</version>
+</dependency>
+```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.gcloud:gcloud-java-contrib:0.1.3'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.gcloud" % "gcloud-java-contrib" % "0.1.3"
+```
+
+Java Versions
+-------------
+
+Java 7 or above is required for using this client.
+
+Versioning
+----------
+
+This library follows [Semantic Versioning] (http://semver.org/).
+
+It is currently in major version zero (``0.y.z``), which means that anything
+may change at any time and the public API should not be considered
+stable.
+
+Contributing
+------------
+
+Contributions to this library are always welcome and highly encouraged.
+
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
+
+Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.
+
+License
+-------
+
+Apache 2.0 - See [LICENSE] for more information.
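Stepping back to `WriteChannelConfigurationTest` above: the builder mirrors the load-job options. A hedged sketch of assembling a configuration; the table coordinates are placeholders, and the final `writer(...)` entry point is an assumption about the `BigQuery` interface rather than something shown in this diff:

```java
WriteChannelConfiguration configuration =
    WriteChannelConfiguration.builder(TableId.of("my_dataset", "my_table"))
        .formatOptions(CsvOptions.builder().allowJaggedRows(true).build())
        .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
        .writeDisposition(JobInfo.WriteDisposition.WRITE_APPEND)
        .maxBadRecords(10)
        .build();
// Assumed, not shown in this diff: hand the configuration to a resumable writer.
// WriteChannel channel = bigquery.writer(configuration);
```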
+ + +[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct +[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE +[cloud-platform]: https://cloud.google.com/ +[developers-console]:https://console.developers.google.com/ diff --git a/gcloud-java-contrib/pom.xml b/gcloud-java-contrib/pom.xml new file mode 100644 index 000000000000..5d5739781727 --- /dev/null +++ b/gcloud-java-contrib/pom.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + com.google.gcloud + gcloud-java-contrib + jar + GCloud Java contributions + + Contains packages that provide higher-level abstraction/functionality for common gcloud-java use cases. + + + com.google.gcloud + gcloud-java-pom + 0.1.4-SNAPSHOT + + + gcloud-java-contrib + + + + ${project.groupId} + gcloud-java + ${project.version} + + + + + + org.codehaus.mojo + exec-maven-plugin + + false + + + + + diff --git a/gcloud-java-core/README.md b/gcloud-java-core/README.md index 2a3be300f4ac..9063bebebbef 100644 --- a/gcloud-java-core/README.md +++ b/gcloud-java-core/README.md @@ -12,14 +12,27 @@ This module provides common functionality required by service-specific modules o Quickstart ---------- -Add this to your pom.xml file +If you are using Maven, add this to your pom.xml file ```xml com.google.gcloud gcloud-java-core - 0.0.10 + 0.1.3 ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-core:0.1.3' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-core" % "0.1.3" +``` + +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). Java Versions ------------- @@ -31,7 +44,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. Versioning ---------- @@ -49,5 +64,6 @@ Apache 2.0 - See [LICENSE] for more information. 
[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [cloud-platform]: https://cloud.google.com/ diff --git a/gcloud-java-core/pom.xml b/gcloud-java-core/pom.xml index 2275c7f0765b..7373b40abc75 100644 --- a/gcloud-java-core/pom.xml +++ b/gcloud-java-core/pom.xml @@ -11,18 +11,27 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.4-SNAPSHOT + + gcloud-java-core + com.google.auth google-auth-library-credentials - 0.1.0 + 0.3.1 com.google.auth google-auth-library-oauth2-http - 0.1.0 + 0.3.1 + + + com.google.guava + guava-jdk5 + + com.google.http-client diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java index ffc54df77a90..fc5d74d0896c 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/AuthCredentials.java @@ -18,81 +18,167 @@ import static com.google.common.base.Preconditions.checkNotNull; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.compute.ComputeCredential; -import com.google.api.client.googleapis.extensions.appengine.auth.oauth2.AppIdentityCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.javanet.NetHttpTransport; -import com.google.api.client.json.jackson.JacksonFactory; -import com.google.auth.http.HttpCredentialsAdapter; +import com.google.auth.oauth2.AccessToken; import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ServiceAccountCredentials; import java.io.IOException; import java.io.InputStream; -import java.io.ObjectInputStream; -import java.io.ObjectStreamException; import java.io.Serializable; -import java.security.GeneralSecurityException; +import java.lang.reflect.Method; import java.security.PrivateKey; +import java.util.Collection; import java.util.Objects; -import java.util.Set; /** * Credentials for accessing Google Cloud services. 
*/ -public abstract class AuthCredentials implements Serializable { - - private static final long serialVersionUID = 236297804453464604L; +public abstract class AuthCredentials implements Restorable { private static class AppEngineAuthCredentials extends AuthCredentials { - private static final long serialVersionUID = 7931300552744202954L; - private static final AuthCredentials INSTANCE = new AppEngineAuthCredentials(); + private static final AppEngineAuthCredentialsState STATE = new AppEngineAuthCredentialsState(); + + private static class AppEngineCredentials extends GoogleCredentials { + + private final Object appIdentityService; + private final Method getAccessToken; + private final Method getAccessTokenResult; + private final Collection scopes; + + AppEngineCredentials() { + try { + Class factoryClass = + Class.forName("com.google.appengine.api.appidentity.AppIdentityServiceFactory"); + Method method = factoryClass.getMethod("getAppIdentityService"); + this.appIdentityService = method.invoke(null); + Class serviceClass = + Class.forName("com.google.appengine.api.appidentity.AppIdentityService"); + Class tokenResultClass = Class.forName( + "com.google.appengine.api.appidentity.AppIdentityService$GetAccessTokenResult"); + this.getAccessTokenResult = serviceClass.getMethod("getAccessToken", Iterable.class); + this.getAccessToken = tokenResultClass.getMethod("getAccessToken"); + this.scopes = null; + } catch (Exception e) { + throw new RuntimeException("Could not create AppEngineCredentials.", e); + } + } + + AppEngineCredentials(Collection scopes, AppEngineCredentials unscoped) { + this.appIdentityService = unscoped.appIdentityService; + this.getAccessToken = unscoped.getAccessToken; + this.getAccessTokenResult = unscoped.getAccessTokenResult; + this.scopes = scopes; + } + + /** + * Refresh the access token by getting it from the App Identity service + */ + @Override + public AccessToken refreshAccessToken() throws IOException { + if (createScopedRequired()) { + throw new IOException("AppEngineCredentials requires createScoped call before use."); + } + try { + Object accessTokenResult = getAccessTokenResult.invoke(appIdentityService, scopes); + String accessToken = (String) getAccessToken.invoke(accessTokenResult); + return new AccessToken(accessToken, null); + } catch (Exception e) { + throw new IOException("Could not get the access token.", e); + } + } + + @Override + public boolean createScopedRequired() { + return scopes == null || scopes.isEmpty(); + } + + @Override + public GoogleCredentials createScoped(Collection scopes) { + return new AppEngineCredentials(scopes, this); + } + } + + private static class AppEngineAuthCredentialsState + implements RestorableState, Serializable { + + private static final long serialVersionUID = 3558563960848658928L; + + @Override + public AuthCredentials restore() { + return INSTANCE; + } + + @Override + public int hashCode() { + return getClass().getName().hashCode(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof AppEngineAuthCredentialsState; + } + } @Override - protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes) { - return new AppIdentityCredential(scopes); + public GoogleCredentials credentials() { + return new AppEngineCredentials(); } - private Object readResolve() throws ObjectStreamException { - return INSTANCE; + @Override + public RestorableState capture() { + return STATE; } } public static class ServiceAccountAuthCredentials extends AuthCredentials { - private 
static final long serialVersionUID = 8007708734318445901L; private final String account; private final PrivateKey privateKey; - private static final AuthCredentials NO_CREDENTIALS = new ServiceAccountAuthCredentials(); + private static class ServiceAccountAuthCredentialsState + implements RestorableState, Serializable { + + private static final long serialVersionUID = -7302180782414633639L; + + private final String account; + private final PrivateKey privateKey; + + private ServiceAccountAuthCredentialsState(String account, PrivateKey privateKey) { + this.account = account; + this.privateKey = privateKey; + } + + @Override + public AuthCredentials restore() { + return new ServiceAccountAuthCredentials(account, privateKey); + } + + @Override + public int hashCode() { + return Objects.hash(account, privateKey); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof ServiceAccountAuthCredentialsState)) { + return false; + } + ServiceAccountAuthCredentialsState other = (ServiceAccountAuthCredentialsState) obj; + return Objects.equals(account, other.account) + && Objects.equals(privateKey, other.privateKey); + } + } ServiceAccountAuthCredentials(String account, PrivateKey privateKey) { this.account = checkNotNull(account); this.privateKey = checkNotNull(privateKey); } - ServiceAccountAuthCredentials() { - account = null; - privateKey = null; - } - @Override - protected HttpRequestInitializer httpRequestInitializer( - HttpTransport transport, Set scopes) { - GoogleCredential.Builder builder = new GoogleCredential.Builder() - .setTransport(transport) - .setJsonFactory(new JacksonFactory()); - if (privateKey != null) { - builder.setServiceAccountPrivateKey(privateKey); - builder.setServiceAccountId(account); - builder.setServiceAccountScopes(scopes); - } - return builder.build(); + public ServiceAccountCredentials credentials() { + return new ServiceAccountCredentials(null, account, privateKey, null, null); } public String account() { @@ -104,81 +190,65 @@ public PrivateKey privateKey() { } @Override - public int hashCode() { - return Objects.hash(account, privateKey); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof ServiceAccountAuthCredentials)) { - return false; - } - ServiceAccountAuthCredentials other = (ServiceAccountAuthCredentials) obj; - return Objects.equals(account, other.account) - && Objects.equals(privateKey, other.privateKey); + public RestorableState capture() { + return new ServiceAccountAuthCredentialsState(account, privateKey); } } - private static class ComputeEngineAuthCredentials extends AuthCredentials { + public static class ApplicationDefaultAuthCredentials extends AuthCredentials { - private static final long serialVersionUID = -5217355402127260144L; + private GoogleCredentials googleCredentials; - private transient ComputeCredential computeCredential; + private static final ApplicationDefaultAuthCredentialsState STATE = + new ApplicationDefaultAuthCredentialsState(); - ComputeEngineAuthCredentials() throws IOException, GeneralSecurityException { - computeCredential = getComputeCredential(); - } + private static class ApplicationDefaultAuthCredentialsState + implements RestorableState, Serializable { - private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { - in.defaultReadObject(); - try { - computeCredential = getComputeCredential(); - } catch (GeneralSecurityException e) { - throw new IOException(e); - } - } - - @Override - protected HttpRequestInitializer 
httpRequestInitializer(HttpTransport transport, - Set scopes) { - return computeCredential; - } - } + private static final long serialVersionUID = -8839085552021212257L; - private static class ApplicationDefaultAuthCredentials extends AuthCredentials { + @Override + public AuthCredentials restore() { + try { + return new ApplicationDefaultAuthCredentials(); + } catch (IOException e) { + throw new IllegalStateException( + "Could not restore " + ApplicationDefaultAuthCredentials.class.getSimpleName(), e); + } + } - private static final long serialVersionUID = -8306873864136099893L; + @Override + public int hashCode() { + return getClass().getName().hashCode(); + } - private transient GoogleCredentials googleCredentials; + @Override + public boolean equals(Object obj) { + return obj instanceof ApplicationDefaultAuthCredentialsState; + } + } ApplicationDefaultAuthCredentials() throws IOException { googleCredentials = GoogleCredentials.getApplicationDefault(); } - private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { - in.defaultReadObject(); - googleCredentials = GoogleCredentials.getApplicationDefault(); + @Override + public GoogleCredentials credentials() { + return googleCredentials; } @Override - protected HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes) { - return new HttpCredentialsAdapter(googleCredentials); + public RestorableState capture() { + return STATE; } } - protected abstract HttpRequestInitializer httpRequestInitializer(HttpTransport transport, - Set scopes); + public abstract GoogleCredentials credentials(); public static AuthCredentials createForAppEngine() { return AppEngineAuthCredentials.INSTANCE; } - public static AuthCredentials createForComputeEngine() - throws IOException, GeneralSecurityException { - return new ComputeEngineAuthCredentials(); - } - /** * Returns the Application Default Credentials. * @@ -188,8 +258,8 @@ public static AuthCredentials createForComputeEngine() * variable GOOGLE_APPLICATION_CREDENTIALS. *

* - * @return the credentials instance. - * @throws IOException if the credentials cannot be created in the current environment. + * @return the credentials instance + * @throws IOException if the credentials cannot be created in the current environment */ public static AuthCredentials createApplicationDefaults() throws IOException { return new ApplicationDefaultAuthCredentials(); @@ -205,7 +275,7 @@ public static AuthCredentials createApplicationDefaults() throws IOException { * * @param account id of the Service Account * @param privateKey private key associated to the account - * @return the credentials instance. + * @return the credentials instance */ public static ServiceAccountAuthCredentials createFor(String account, PrivateKey privateKey) { return new ServiceAccountAuthCredentials(account, privateKey); @@ -220,26 +290,20 @@ public static ServiceAccountAuthCredentials createFor(String account, PrivateKey *
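To make the reworked credentials surface concrete, a fragment-style sketch tying together `createForJson`, the new `credentials()` accessor, and the `Restorable` contract; the key-file path is a placeholder, and the `FileInputStream` import plus `IOException` handling are omitted:

```java
AuthCredentials credentials =
    AuthCredentials.createForJson(new FileInputStream("/path/to/service-account.json"));

// AuthCredentials now implements Restorable: its state can be captured,
// persisted or sent elsewhere, and restored into working credentials.
RestorableState<AuthCredentials> state = credentials.capture();
AuthCredentials restored = state.restore();

// credentials() exposes the underlying google-auth-library object.
GoogleCredentials googleCredentials = restored.credentials();
```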

* * @param jsonCredentialStream stream for Service Account Credentials in JSON format - * @return the credentials instance. - * @throws IOException if the credentials cannot be created from the stream. + * @return the credentials instance + * @throws IOException if the credentials cannot be created from the stream */ public static ServiceAccountAuthCredentials createForJson(InputStream jsonCredentialStream) throws IOException { - GoogleCredential tempCredentials = GoogleCredential.fromStream(jsonCredentialStream); - return new ServiceAccountAuthCredentials(tempCredentials.getServiceAccountId(), - tempCredentials.getServiceAccountPrivateKey()); - } - - public static AuthCredentials noCredentials() { - return ServiceAccountAuthCredentials.NO_CREDENTIALS; - } - - static ComputeCredential getComputeCredential() throws IOException, GeneralSecurityException { - NetHttpTransport transport = GoogleNetHttpTransport.newTrustedTransport(); - // Try to connect using Google Compute Engine service account credentials. - ComputeCredential credential = new ComputeCredential(transport, new JacksonFactory()); - // Force token refresh to detect if we are running on Google Compute Engine. - credential.refreshToken(); - return credential; + GoogleCredentials tempCredentials = GoogleCredentials.fromStream(jsonCredentialStream); + if (tempCredentials instanceof ServiceAccountCredentials) { + ServiceAccountCredentials tempServiceAccountCredentials = + (ServiceAccountCredentials) tempCredentials; + return new ServiceAccountAuthCredentials( + tempServiceAccountCredentials.getClientEmail(), + tempServiceAccountCredentials.getPrivateKey()); + } + throw new IOException( + "The given JSON Credentials Stream is not for a service account credential."); } } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java index 982d3058295c..d9e6f2db7c95 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseService.java @@ -16,9 +16,39 @@ package com.google.gcloud; -public abstract class BaseService> +import com.google.gcloud.ExceptionHandler.Interceptor; + +/** + * Base class for service objects. + * + * @param the {@code ServiceOptions} subclass corresponding to the service + */ +public abstract class BaseService> implements Service { + public static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { + + private static final long serialVersionUID = -8429573486870467828L; + + @Override + public RetryResult afterEval(Exception exception, RetryResult retryResult) { + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + + @Override + public RetryResult beforeEval(Exception exception) { + if (exception instanceof BaseServiceException) { + boolean retriable = ((BaseServiceException) exception).retryable(); + return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; + } + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + }; + public static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() + .abortOn(RuntimeException.class) + .interceptor(EXCEPTION_HANDLER_INTERCEPTOR) + .build(); + private final OptionsT options; protected BaseService(OptionsT options) { diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java new file mode 100644 index 000000000000..579340f1256e --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseServiceException.java @@ -0,0 +1,223 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.common.base.MoreObjects; + +import java.io.IOException; +import java.io.Serializable; +import java.net.SocketTimeoutException; +import java.util.Collections; +import java.util.Objects; +import java.util.Set; + +/** + * Base class for all service exceptions. + */ +public class BaseServiceException extends RuntimeException { + + protected static final class Error implements Serializable { + + private static final long serialVersionUID = -4019600198652965721L; + + private final Integer code; + private final String reason; + + public Error(Integer code, String reason) { + this.code = code; + this.reason = reason; + } + + /** + * Returns the code associated with this exception. + */ + public Integer code() { + return code; + } + + /** + * Returns the reason that caused the exception. 
+ */ + public String reason() { + return reason; + } + + boolean isRetryable(Set retryableErrors) { + for (Error retryableError : retryableErrors) { + if ((retryableError.code() == null || retryableError.code().equals(this.code())) + && (retryableError.reason() == null || retryableError.reason().equals(this.reason()))) { + return true; + } + } + return false; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("code", code).add("reason", reason).toString(); + } + + @Override + public int hashCode() { + return Objects.hash(code, reason); + } + } + + private static final long serialVersionUID = 759921776378760835L; + public static final int UNKNOWN_CODE = 0; + + private final int code; + private final boolean retryable; + private final String reason; + private final boolean idempotent; + private final String location; + private final String debugInfo; + + public BaseServiceException(IOException exception, boolean idempotent) { + super(message(exception), exception); + int code = UNKNOWN_CODE; + String reason = null; + String location = null; + String debugInfo = null; + if (exception instanceof GoogleJsonResponseException) { + GoogleJsonError jsonError = ((GoogleJsonResponseException) exception).getDetails(); + Error error = error(jsonError); + code = error.code; + reason = error.reason; + if (reason != null) { + GoogleJsonError.ErrorInfo errorInfo = jsonError.getErrors().get(0); + location = errorInfo.getLocation(); + debugInfo = (String) errorInfo.get("debugInfo"); + } + } + this.code = code; + this.retryable = idempotent && isRetryable(exception); + this.reason = reason; + this.idempotent = idempotent; + this.location = location; + this.debugInfo = debugInfo; + } + + public BaseServiceException(GoogleJsonError error, boolean idempotent) { + super(error.getMessage()); + this.code = error.getCode(); + this.reason = reason(error); + this.idempotent = idempotent; + this.retryable = idempotent && isRetryable(error); + this.location = null; + this.debugInfo = null; + } + + public BaseServiceException(int code, String message, String reason, boolean idempotent) { + this(code, message, reason, idempotent, null); + } + + public BaseServiceException(int code, String message, String reason, boolean idempotent, + Throwable cause) { + super(message, cause); + this.code = code; + this.reason = reason; + this.idempotent = idempotent; + this.retryable = idempotent && new Error(code, reason).isRetryable(retryableErrors()); + this.location = null; + this.debugInfo = null; + } + + protected Set retryableErrors() { + return Collections.emptySet(); + } + + protected boolean isRetryable(GoogleJsonError error) { + return error != null && error(error).isRetryable(retryableErrors()); + } + + protected boolean isRetryable(IOException exception) { + if (exception instanceof GoogleJsonResponseException) { + return isRetryable(((GoogleJsonResponseException) exception).getDetails()); + } + return exception instanceof SocketTimeoutException; + } + + /** + * Returns the code associated with this exception. + */ + public int code() { + return code; + } + + /** + * Returns the reason that caused the exception. + */ + public String reason() { + return reason; + } + + /** + * Returns {@code true} when it is safe to retry the operation that caused this exception. + */ + public boolean retryable() { + return retryable; + } + + /** + * Returns {@code true} when the operation that caused this exception had no side effects. 
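To show how the pieces above compose: a service-specific exception opts into retries by overriding `retryableErrors()`, and `Error.isRetryable` treats a `null` code or reason as a wildcard. A sketch under those rules; the class name and the specific code/reason pairs are illustrative, not taken from this diff:

```java
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.util.Set;

public class ExampleServiceException extends BaseServiceException {

  // null is a wildcard in Error.isRetryable: retry any HTTP 500,
  // and retry "rateLimitExceeded" regardless of status code.
  private static final Set<Error> RETRYABLE_ERRORS = ImmutableSet.of(
      new Error(500, null),
      new Error(null, "rateLimitExceeded"));

  public ExampleServiceException(IOException exception, boolean idempotent) {
    super(exception, idempotent);
  }

  @Override
  protected Set<Error> retryableErrors() {
    return RETRYABLE_ERRORS;
  }
}
```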
+ */ + public boolean idempotent() { + return idempotent; + } + + /** + * Returns the service location where the error causing the exception occurred. Returns + * {@code null} if not set. + */ + public String location() { + return location; + } + + protected String debugInfo() { + return debugInfo; + } + + protected static String reason(GoogleJsonError error) { + if (error.getErrors() != null && !error.getErrors().isEmpty()) { + return error.getErrors().get(0).getReason(); + } + return null; + } + + protected static Error error(GoogleJsonError error) { + return new Error(error.getCode(), reason(error)); + } + + protected static String message(IOException exception) { + if (exception instanceof GoogleJsonResponseException) { + return ((GoogleJsonResponseException) exception).getDetails().getMessage(); + } + return exception.getMessage(); + } + + protected static void translateAndPropagateIfPossible(RetryHelper.RetryHelperException ex) { + if (ex.getCause() instanceof BaseServiceException) { + throw (BaseServiceException) ex.getCause(); + } + if (ex instanceof RetryHelper.RetryInterruptedException) { + RetryHelper.RetryInterruptedException.propagate(); + } + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java new file mode 100644 index 000000000000..e05383a65826 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/BaseWriteChannel.java @@ -0,0 +1,293 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.common.base.MoreObjects; + +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Objects; + +/** + * Base implementation for a {@link WriteChannel}. + * + * @param the service options used by the channel to issue RPC requests + * @param the entity this channel writes data to. Possibly with additional configuration + */ +public abstract class BaseWriteChannel< + ServiceOptionsT extends ServiceOptions, + EntityT extends Serializable> implements WriteChannel { + + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + + private final ServiceOptionsT options; + private final EntityT entity; + private final String uploadId; + private int position; + private byte[] buffer = new byte[0]; + private int limit; + private boolean isOpen = true; + private int chunkSize = defaultChunkSize(); + + protected int minChunkSize() { + return MIN_CHUNK_SIZE; + } + + protected int defaultChunkSize() { + return DEFAULT_CHUNK_SIZE; + } + + /** + * Writes {@code length} bytes of {@link #buffer()} to the {@link #uploadId()} URL. 
+ * + * @param length the number of bytes to write from {@link #buffer()} + * @param last if {@code true} the resumable session is closed + */ + protected abstract void flushBuffer(int length, boolean last); + + protected ServiceOptionsT options() { + return options; + } + + protected EntityT entity() { + return entity; + } + + protected String uploadId() { + return uploadId; + } + + protected int position() { + return position; + } + + protected byte[] buffer() { + return buffer; + } + + protected int limit() { + return limit; + } + + protected int chunkSize() { + return chunkSize; + } + + @Override + public final void chunkSize(int chunkSize) { + chunkSize = (chunkSize / minChunkSize()) * minChunkSize(); + this.chunkSize = Math.max(minChunkSize(), chunkSize); + } + + protected BaseWriteChannel(ServiceOptionsT options, EntityT entity, String uploadId) { + this.options = options; + this.entity = entity; + this.uploadId = uploadId; + } + + private void flush() { + if (limit >= chunkSize) { + final int length = limit - limit % minChunkSize(); + flushBuffer(length, false); + position += length; + limit -= length; + byte[] temp = new byte[chunkSize]; + System.arraycopy(buffer, length, temp, 0, limit); + buffer = temp; + } + } + + private void validateOpen() throws IOException { + if (!isOpen) { + throw new IOException("stream is closed"); + } + } + + @Override + public final int write(ByteBuffer byteBuffer) throws IOException { + validateOpen(); + int toWrite = byteBuffer.remaining(); + int spaceInBuffer = buffer.length - limit; + if (spaceInBuffer >= toWrite) { + byteBuffer.get(buffer, limit, toWrite); + } else { + buffer = Arrays.copyOf(buffer, Math.max(chunkSize, buffer.length + toWrite - spaceInBuffer)); + byteBuffer.get(buffer, limit, toWrite); + } + limit += toWrite; + flush(); + return toWrite; + } + + @Override + public boolean isOpen() { + return isOpen; + } + + @Override + public final void close() throws IOException { + if (isOpen) { + flushBuffer(limit, true); + position += buffer.length; + isOpen = false; + buffer = null; + } + } + + /** + * Creates a {@link BaseState.Builder} for the current write channel. + */ + protected abstract BaseState.Builder stateBuilder(); + + @Override + public RestorableState capture() { + byte[] bufferToSave = null; + if (isOpen) { + flush(); + bufferToSave = Arrays.copyOf(buffer, limit); + } + return stateBuilder() + .position(position) + .buffer(bufferToSave) + .isOpen(isOpen) + .chunkSize(chunkSize) + .build(); + } + + /** + * Restores the state of the current write channel given a {@link BaseState} object. 
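One subtlety worth spelling out in `chunkSize(int)` above: the requested size is first rounded down to a multiple of the minimum chunk, then clamped from below. A worked example with the defaults shown earlier (`MIN_CHUNK_SIZE` = 256 KiB = 262144 bytes):

```java
int min = 256 * 1024;                    // minChunkSize()
int requested = 1000000;                 // caller asks for ~977 KiB
int rounded = (requested / min) * min;   // 3 * 262144 = 786432
int effective = Math.max(min, rounded);  // 786432; anything below 262144 clamps up to 262144
```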
+ */ + protected void restore(BaseState state) { + if (state.buffer != null) { + this.buffer = state.buffer.clone(); + this.limit = state.buffer.length; + } + this.position = state.position; + this.isOpen = state.isOpen; + this.chunkSize = state.chunkSize; + } + + protected abstract static class BaseState< + ServiceOptionsT extends ServiceOptions, EntityT extends Serializable> + implements RestorableState, Serializable { + + private static final long serialVersionUID = 8541062465055125619L; + + protected final ServiceOptionsT serviceOptions; + protected final EntityT entity; + protected final String uploadId; + protected final int position; + protected final byte[] buffer; + protected final boolean isOpen; + protected final int chunkSize; + + protected BaseState(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.entity = builder.entity; + this.uploadId = builder.uploadId; + this.position = builder.position; + this.buffer = builder.buffer; + this.isOpen = builder.isOpen; + this.chunkSize = builder.chunkSize; + } + + /** + * Base builder for a write channel's state. Users are not supposed to access this class + * directly. + * + * @param the service options used by the channel to issue RPC requests + * @param the entity this channel writes data to. Possibly with additional + * configuration + */ + public abstract static class Builder< + ServiceOptionsT extends ServiceOptions, + EntityT extends Serializable> { + private final ServiceOptionsT serviceOptions; + private final EntityT entity; + private final String uploadId; + private int position; + private byte[] buffer; + private boolean isOpen; + private int chunkSize; + + protected Builder(ServiceOptionsT options, EntityT entity, String uploadId) { + this.serviceOptions = options; + this.entity = entity; + this.uploadId = uploadId; + } + + public Builder position(int position) { + this.position = position; + return this; + } + + public Builder buffer(byte[] buffer) { + this.buffer = buffer; + return this; + } + + public Builder isOpen(boolean isOpen) { + this.isOpen = isOpen; + return this; + } + + public Builder chunkSize(int chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + public abstract RestorableState build(); + } + + @Override + public int hashCode() { + return Objects.hash(serviceOptions, entity, uploadId, position, isOpen, chunkSize, + Arrays.hashCode(buffer)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof BaseState)) { + return false; + } + final BaseState other = (BaseState) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.entity, other.entity) + && Objects.equals(this.uploadId, other.uploadId) + && Objects.deepEquals(this.buffer, other.buffer) + && this.position == other.position + && this.isOpen == other.isOpen + && this.chunkSize == other.chunkSize; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("entity", entity) + .add("uploadId", uploadId) + .add("position", position) + .add("isOpen", isOpen) + .toString(); + } + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java b/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java index a0fab3dca566..39d4c4e75a1a 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ExceptionHandler.java @@ -64,7 +64,7 @@ enum RetryResult { * This method is 
called after the evaluation and could alter its result. * * @param exception the exception that is being evaluated - * @param retryResult the result of the evaluation so far. + * @param retryResult the result of the evaluation so far * @return {@link RetryResult} to indicate if the exception should be ignored ( * {@link RetryResult#RETRY}), propagated ({@link RetryResult#NO_RETRY}), or evaluation * should proceed ({@link RetryResult#CONTINUE_EVALUATION}). @@ -231,11 +231,11 @@ void verifyCaller(Callable callable) { } } - public Set> getRetriableExceptions() { + public Set> retriableExceptions() { return retriableExceptions; } - public Set> getNonRetriableExceptions() { + public Set> nonRetriableExceptions() { return nonRetriableExceptions; } @@ -250,7 +250,7 @@ boolean shouldRetry(Exception ex) { Interceptor.RetryResult retryResult = retryInfo == null ? Interceptor.RetryResult.NO_RETRY : retryInfo.retry; for (Interceptor interceptor : interceptors) { - Interceptor.RetryResult interceptorRetry = + Interceptor.RetryResult interceptorRetry = checkNotNull(interceptor.afterEval(ex, retryResult)); if (interceptorRetry != Interceptor.RetryResult.CONTINUE_EVALUATION) { retryResult = interceptorRetry; @@ -262,7 +262,7 @@ boolean shouldRetry(Exception ex) { /** * Returns an instance which retry any checked exception and abort on any runtime exception. */ - public static ExceptionHandler getDefaultInstance() { + public static ExceptionHandler defaultInstance() { return DEFAULT_INSTANCE; } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Page.java b/gcloud-java-core/src/main/java/com/google/gcloud/Page.java new file mode 100644 index 000000000000..53f3a3842a18 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Page.java @@ -0,0 +1,69 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.util.Iterator; + +/** + * Interface for Google Cloud paginated results. + * + *

+ * Use {@code Page} to iterate through all values (also in next pages):
+ * <pre> {@code
+ * Page<T> page = ...; // get a Page instance
+ * Iterator<T> iterator = page.iterateAll();
+ * while (iterator.hasNext()) {
+ *   T value = iterator.next();
+ *   // do something with value
+ * }}</pre>
+ *
+ * Or handle pagination explicitly:
+ * <pre> {@code
+ * Page<T> page = ...; // get a Page instance
+ * while (page != null) {
+ *   for (T value : page.values()) {
+ *     // do something with value
+ *   }
+ *   page = page.nextPage();
+ * }}</pre>
+ * + * @param the value type that the page holds + */ +public interface Page { + + /** + * Returns the values contained in this page. + */ + Iterable values(); + + /** + * Returns an iterator for all values, possibly also in the next pages. Once current page's values + * are traversed the iterator fetches next page, if any. + */ + Iterator iterateAll(); + + /** + * Returns the cursor for the nextPage or {@code null} if no more results. + */ + String nextPageCursor(); + + /** + * Returns the next page of results or {@code null} if no more result. + */ + Page nextPage(); + +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java b/gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java new file mode 100644 index 000000000000..2dc031ab9bd4 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/PageImpl.java @@ -0,0 +1,144 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import com.google.common.collect.AbstractIterator; +import com.google.common.collect.ImmutableMap; + +import java.io.Serializable; +import java.util.Collections; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; + +/** + * Base implementation for Google Cloud paginated results. + * + * @param the value type that the page holds + */ +public class PageImpl implements Page, Serializable { + + private static final long serialVersionUID = 3914827379823557934L; + + private final String cursor; + private final Iterable results; + private final NextPageFetcher pageFetcher; + + /** + * Interface for fetching the next page of results from the service. + * + * @param the value type that the page holds + */ + public interface NextPageFetcher extends Serializable { + Page nextPage(); + } + + static class PageIterator extends AbstractIterator { + + private Iterator currentPageIterator; + private Page currentPage; + + PageIterator(Page currentPage) { + this.currentPageIterator = currentPage.values().iterator(); + this.currentPage = currentPage; + } + + @Override + protected T computeNext() { + while (!currentPageIterator.hasNext()) { + currentPage = currentPage.nextPage(); + if (currentPage == null) { + return endOfData(); + } + currentPageIterator = currentPage.values().iterator(); + } + return currentPageIterator.next(); + } + } + + /** + * Creates a {@code PageImpl} object. In order for the object to be serializable the {@code + * results} parameter must be serializable. + */ + public PageImpl(NextPageFetcher pageFetcher, String cursor, Iterable results) { + this.pageFetcher = pageFetcher; + this.cursor = cursor; + this.results = results; + } + + @Override + public Iterable values() { + return results == null ? 
Collections.emptyList() : results; + } + + @Override + public Iterator iterateAll() { + return new PageIterator<>(this); + } + + @Override + public String nextPageCursor() { + return cursor; + } + + @Override + public Page nextPage() { + if (cursor == null || pageFetcher == null) { + return null; + } + return pageFetcher.nextPage(); + } + + @Override + public int hashCode() { + return Objects.hash(cursor, results); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof PageImpl)) { + return false; + } + PageImpl other = (PageImpl) obj; + return Objects.equals(cursor, other.cursor) + && Objects.equals(results, other.results); + } + + /** + * Utility method to construct the options map for the next page request. + * + * @param the value type that the page holds. Instances of {@code T} should be + * {@code Serializable} + * @param pageTokenOption the key for the next page cursor option in the options map + * @param cursor the cursor for the next page + * @param optionMap the previous options map + * @return the options map for the next page request + */ + public static Map nextRequestOptions( + T pageTokenOption, String cursor, Map optionMap) { + ImmutableMap.Builder builder = ImmutableMap.builder(); + if (cursor != null) { + builder.put(pageTokenOption, cursor); + } + for (Map.Entry option : optionMap.entrySet()) { + if (!Objects.equals(option.getKey(), pageTokenOption)) { + builder.put(option.getKey(), option.getValue()); + } + } + return builder.build(); + } +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java new file mode 100644 index 000000000000..7537c5a8ce0b --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ReadChannel.java @@ -0,0 +1,57 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.io.Closeable; +import java.io.IOException; +import java.nio.channels.ReadableByteChannel; + +/** + * A channel for reading data from a Google Cloud object. + * + *
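Before the next file: `PageImpl.nextRequestOptions` above rebuilds the option map for a follow-up page request, inserting the new cursor and copying every non-cursor entry. A sketch with a hypothetical option-key enum (the enum is illustrative, not part of this diff):

```java
// Hypothetical option keys, declared at class level for illustration.
enum FakeOption { PAGE_TOKEN, MAX_RESULTS }

Map<FakeOption, Object> current = ImmutableMap.<FakeOption, Object>of(
    FakeOption.PAGE_TOKEN, "old-cursor", FakeOption.MAX_RESULTS, 100);
Map<FakeOption, Object> next =
    PageImpl.nextRequestOptions(FakeOption.PAGE_TOKEN, "new-cursor", current);
// next maps PAGE_TOKEN to "new-cursor" and still carries MAX_RESULTS = 100.
```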

+ * <p>Implementations of this class may buffer data internally to reduce remote calls. This
+ * interface implements {@link Restorable} to allow saving the reader's state to continue reading
+ * afterwards.
+ * </p>

+ */
+public interface ReadChannel extends ReadableByteChannel, Closeable, Restorable<ReadChannel> {
+
+  /**
+   * Overridden to remove IOException.
+   *
+   * @see java.nio.channels.Channel#close()
+   */
+  @Override
+  void close();
+
+  void seek(int position) throws IOException;
+
+  /**
+   * Sets the minimum size that will be read by a single RPC.
+   * Read data will be locally buffered until consumed.
+   */
+  void chunkSize(int chunkSize);
+
+  /**
+   * Captures the read channel state so that it can be saved and restored afterwards.
+   *
+   * @return a {@link RestorableState} object that contains the read channel state and can restore
+   *     it afterwards.
+   */
+  @Override
+  RestorableState<ReadChannel> capture();
+}
diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java b/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java
new file mode 100644
index 000000000000..90633c70046f
--- /dev/null
+++ b/gcloud-java-core/src/main/java/com/google/gcloud/Restorable.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud;
+
+/**
+ * Implementations of this interface can persist their state and restore from it.
+ *
+ *

+ * A typical capture usage:
+ * <pre> {@code
+ * X restorableObj; // X instanceof Restorable<X>
+ * RestorableState<X> state = restorableObj.capture();
+ * .. persist state
+ * }</pre>
+ *
+ * A typical restore usage:
+ * <pre> {@code
+ * RestorableState<X> state = ... // read from persistence
+ * X restorableObj = state.restore();
+ * ...
+ * }</pre>
+ * + * @param the restorable object's type + */ +public interface Restorable> { + + /** + * Captures the state of this object. + * + * @return a {@link RestorableState} instance that contains the state for this object and can + * restore it afterwards. + */ + RestorableState capture(); +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java b/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java index 9cd3ee5c3c4c..d6ce736ae856 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/RestorableState.java @@ -22,8 +22,10 @@ * * Implementations of this class must implement {@link java.io.Serializable} to ensure that the * state of a the object can be correctly serialized. + * + * @param the restored object's type */ -public interface RestorableState { +public interface RestorableState> { /** * Returns an object whose internal state reflects the one saved in the invocation object. diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java b/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java index 7b47209cd3ff..9b9c1f6a3124 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/RetryHelper.java @@ -194,9 +194,9 @@ private V doRetry() throws RetryHelperException { } exception = e; } - if (attemptNumber >= params.getRetryMaxAttempts() - || attemptNumber >= params.getRetryMinAttempts() - && stopwatch.elapsed(MILLISECONDS) >= params.getTotalRetryPeriodMillis()) { + if (attemptNumber >= params.retryMaxAttempts() + || attemptNumber >= params.retryMinAttempts() + && stopwatch.elapsed(MILLISECONDS) >= params.totalRetryPeriodMillis()) { throw new RetriesExhaustedException(this + ": Too many failures, giving up", exception); } long sleepDurationMillis = getSleepDuration(params, attemptNumber); @@ -215,9 +215,9 @@ private V doRetry() throws RetryHelperException { @VisibleForTesting static long getSleepDuration(RetryParams retryParams, int attemptsSoFar) { - long initialDelay = retryParams.getInitialRetryDelayMillis(); - double backoffFactor = retryParams.getRetryDelayBackoffFactor(); - long maxDelay = retryParams.getMaxRetryDelayMillis(); + long initialDelay = retryParams.initialRetryDelayMillis(); + double backoffFactor = retryParams.retryDelayBackoffFactor(); + long maxDelay = retryParams.maxRetryDelayMillis(); long retryDelay = getExponentialValue(initialDelay, backoffFactor, maxDelay, attemptsSoFar); return (long) ((random() / 2.0 + .75) * retryDelay); } @@ -228,8 +228,8 @@ private static long getExponentialValue(long initialDelay, double backoffFactor, } public static V runWithRetries(Callable callable) throws RetryHelperException { - return runWithRetries(callable, RetryParams.getDefaultInstance(), - ExceptionHandler.getDefaultInstance()); + return runWithRetries(callable, RetryParams.defaultInstance(), + ExceptionHandler.defaultInstance()); } public static V runWithRetries(Callable callable, RetryParams params, diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java b/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java index 24983326ccae..ab3644c6d747 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/RetryParams.java @@ -38,8 +38,8 @@ * {@code RetryParams}, first create a {@link RetryParams.Builder}. 
The builder is mutable and each * of the parameters can be set (any unset parameters will fallback to the defaults). The * {@code Builder} can be then used to create an immutable {@code RetryParams} object. For default - * {@code RetryParams} use {@link #getDefaultInstance}. Default settings are subject to change - * release to release. If you require specific settings, explicitly create an instance of + * {@code RetryParams} use {@link #defaultInstance}. Default settings are subject to change release + * to release. If you require specific settings, explicitly create an instance of * {@code RetryParams} with all the required settings. * * @see RetryHelper @@ -91,12 +91,12 @@ private Builder() { retryDelayBackoffFactor = DEFAULT_RETRY_DELAY_BACKOFF_FACTOR; totalRetryPeriodMillis = DEFAULT_TOTAL_RETRY_PERIOD_MILLIS; } else { - retryMinAttempts = retryParams.getRetryMinAttempts(); - retryMaxAttempts = retryParams.getRetryMaxAttempts(); - initialRetryDelayMillis = retryParams.getInitialRetryDelayMillis(); - maxRetryDelayMillis = retryParams.getMaxRetryDelayMillis(); - retryDelayBackoffFactor = retryParams.getRetryDelayBackoffFactor(); - totalRetryPeriodMillis = retryParams.getTotalRetryPeriodMillis(); + retryMinAttempts = retryParams.retryMinAttempts(); + retryMaxAttempts = retryParams.retryMaxAttempts(); + initialRetryDelayMillis = retryParams.initialRetryDelayMillis(); + maxRetryDelayMillis = retryParams.maxRetryDelayMillis(); + retryDelayBackoffFactor = retryParams.retryDelayBackoffFactor(); + totalRetryPeriodMillis = retryParams.totalRetryPeriodMillis(); } } @@ -201,7 +201,7 @@ private RetryParams(Builder builder) { /** * Returns an instance with the default parameters. */ - public static RetryParams getDefaultInstance() { + public static RetryParams defaultInstance() { return DEFAULT_INSTANCE; } @@ -216,14 +216,14 @@ public static RetryParams noRetries() { /** * Returns the retryMinAttempts. Default value is {@value #DEFAULT_RETRY_MIN_ATTEMPTS}. */ - public int getRetryMinAttempts() { + public int retryMinAttempts() { return retryMinAttempts; } /** * Returns the retryMaxAttempts. Default value is {@value #DEFAULT_RETRY_MAX_ATTEMPTS}. */ - public int getRetryMaxAttempts() { + public int retryMaxAttempts() { return retryMaxAttempts; } @@ -231,14 +231,14 @@ public int getRetryMaxAttempts() { * Returns the initialRetryDelayMillis. Default value is * {@value #DEFAULT_INITIAL_RETRY_DELAY_MILLIS}. */ - public long getInitialRetryDelayMillis() { + public long initialRetryDelayMillis() { return initialRetryDelayMillis; } /** * Returns the maxRetryDelayMillis. Default values is {@value #DEFAULT_MAX_RETRY_DELAY_MILLIS}. */ - public long getMaxRetryDelayMillis() { + public long maxRetryDelayMillis() { return maxRetryDelayMillis; } @@ -246,14 +246,15 @@ public long getMaxRetryDelayMillis() { * Returns the maxRetryDelayBackoffFactor. Default values is * {@value #DEFAULT_RETRY_DELAY_BACKOFF_FACTOR}. */ - public double getRetryDelayBackoffFactor() { + public double retryDelayBackoffFactor() { return retryDelayBackoffFactor; } /** - * Returns the totalRetryPeriodMillis. Default value is {@value #DEFAULT_TOTAL_RETRY_PERIOD_MILLIS}. + * Returns the totalRetryPeriodMillis. Default value is + * {@value #DEFAULT_TOTAL_RETRY_PERIOD_MILLIS}. 
*/ - public long getTotalRetryPeriodMillis() { + public long totalRetryPeriodMillis() { return totalRetryPeriodMillis; } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/Service.java b/gcloud-java-core/src/main/java/com/google/gcloud/Service.java index 19759fb20e21..60bc26670f2e 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/Service.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/Service.java @@ -16,6 +16,11 @@ package com.google.gcloud; -public interface Service> { +/** + * Interface for service objects. + * + * @param the {@code ServiceOptions} subclass corresponding to the service + */ +public interface Service> { OptionsT options(); } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java new file mode 100644 index 000000000000..1727e9c3976f --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceFactory.java @@ -0,0 +1,32 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +/** + * A base interface for all service factories. + * + *

Implementation must provide a public no-arg constructor. + * Loading of a factory implementation is done via {@link java.util.ServiceLoader}. + * + * @param the service subclass + * @param the {@code ServiceOptions} subclass corresponding to the service + */ +@SuppressWarnings("rawtypes") +public interface ServiceFactory { + + ServiceT create(ServiceOptionsT serviceOptions); +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java index 29245e24f3cd..31e543809464 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/ServiceOptions.java @@ -17,7 +17,7 @@ package com.google.gcloud; import static com.google.common.base.MoreObjects.firstNonNull; -import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkArgument; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.client.extensions.appengine.http.UrlFetchTransport; @@ -25,53 +25,84 @@ import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpTransport; import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.auth.http.HttpCredentialsAdapter; import com.google.common.collect.Iterables; import com.google.gcloud.spi.ServiceRpcFactory; import java.io.BufferedReader; import java.io.File; +import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.io.ObjectInputStream; import java.io.ObjectStreamException; import java.io.Serializable; import java.lang.reflect.Method; import java.net.HttpURLConnection; import java.net.URL; +import java.util.Enumeration; import java.util.Locale; import java.util.Objects; import java.util.ServiceLoader; import java.util.Set; +import java.util.jar.Attributes; +import java.util.jar.JarFile; +import java.util.jar.Manifest; import java.util.regex.Matcher; import java.util.regex.Pattern; -public abstract class ServiceOptions< - ServiceRpcT, - OptionsT extends ServiceOptions> - implements Serializable { +/** + * Abstract class representing service options. 
+ * + * @param the service subclass + * @param the spi-layer class corresponding to the service + * @param the {@code ServiceOptions} subclass corresponding to the service + */ +public abstract class ServiceOptions, ServiceRpcT, + OptionsT extends ServiceOptions> implements Serializable { private static final String DEFAULT_HOST = "https://www.googleapis.com"; private static final long serialVersionUID = 1203687993961393350L; private static final String PROJECT_ENV_NAME = "GCLOUD_PROJECT"; + private static final String MANIFEST_ARTIFACT_ID_KEY = "artifactId"; + private static final String MANIFEST_VERSION_KEY = "Implementation-Version"; + private static final String ARTIFACT_ID = "gcloud-java-core"; + private static final String APPLICATION_BASE_NAME = "gcloud-java"; + private static final String APPLICATION_NAME = getApplicationName(); private final String projectId; private final String host; - private final HttpTransportFactory httpTransportFactory; - private final AuthCredentials authCredentials; + private final String httpTransportFactoryClassName; + private final RestorableState authCredentialsState; private final RetryParams retryParams; - private final ServiceRpcFactory serviceRpcFactory; + private final String serviceRpcFactoryClassName; + private final String serviceFactoryClassName; private final int connectTimeout; private final int readTimeout; private final Clock clock; - public interface HttpTransportFactory extends Serializable { + private transient HttpTransportFactory httpTransportFactory; + private transient AuthCredentials authCredentials; + private transient ServiceRpcFactory serviceRpcFactory; + private transient ServiceFactory serviceFactory; + private transient ServiceT service; + private transient ServiceRpcT rpc; + + /** + * A base interface for all {@link HttpTransport} factories. + * + * Implementation must provide a public no-arg constructor. Loading of a factory implementation is + * done via {@link java.util.ServiceLoader}. + */ + public interface HttpTransportFactory { HttpTransport create(); } - private enum DefaultHttpTransportFactory implements HttpTransportFactory { + public static class DefaultHttpTransportFactory implements HttpTransportFactory { - INSTANCE; + private static final HttpTransportFactory INSTANCE = new DefaultHttpTransportFactory(); @Override public HttpTransport create() { @@ -83,12 +114,6 @@ public HttpTransport create() { // Maybe not on App Engine } } - // Consider Compute - try { - return AuthCredentials.getComputeCredential().getTransport(); - } catch (Exception e) { - // Maybe not on GCE - } return new NetHttpTransport(); } } @@ -100,9 +125,9 @@ public HttpTransport create() { * Implementations should implement {@code Serializable} wherever possible and must document * whether or not they do support serialization. */ - public static abstract class Clock { + public abstract static class Clock { - private static ServiceOptions.Clock DEFAULT_TIME_SOURCE = new DefaultClock(); + private static final ServiceOptions.Clock DEFAULT_TIME_SOURCE = new DefaultClock(); /** * Returns current time in milliseconds according to this clock. @@ -132,16 +157,24 @@ private Object readResolve() throws ObjectStreamException { } } - protected abstract static class Builder< - ServiceRpcT, - OptionsT extends ServiceOptions, - B extends Builder> { + /** + * Builder for {@code ServiceOptions}. 
+ * + * @param the service subclass + * @param the spi-layer class corresponding to the service + * @param the {@code ServiceOptions} subclass corresponding to the service + * @param the {@code ServiceOptions} builder + */ + protected abstract static class Builder, ServiceRpcT, + OptionsT extends ServiceOptions, + B extends Builder> { private String projectId; private String host; private HttpTransportFactory httpTransportFactory; private AuthCredentials authCredentials; private RetryParams retryParams; + private ServiceFactory serviceFactory; private ServiceRpcFactory serviceRpcFactory; private int connectTimeout = -1; private int readTimeout = -1; @@ -149,28 +182,40 @@ protected abstract static class Builder< protected Builder() {} - protected Builder(ServiceOptions options) { + protected Builder(ServiceOptions options) { projectId = options.projectId; host = options.host; httpTransportFactory = options.httpTransportFactory; authCredentials = options.authCredentials; retryParams = options.retryParams; + serviceFactory = options.serviceFactory; serviceRpcFactory = options.serviceRpcFactory; + connectTimeout = options.connectTimeout; + readTimeout = options.readTimeout; + clock = options.clock; } - protected abstract ServiceOptions build(); + protected abstract ServiceOptions build(); @SuppressWarnings("unchecked") protected B self() { return (B) this; } + /** + * Sets the service factory. + */ + public B serviceFactory(ServiceFactory serviceFactory) { + this.serviceFactory = serviceFactory; + return self(); + } + /** * Sets the service's clock. The clock is mainly used for testing purpose. {@link Clock} will be * replaced by Java8's {@code java.time.Clock}. * * @param clock the clock to set - * @return the builder. + * @return the builder */ public B clock(Clock clock) { this.clock = clock; @@ -180,18 +225,17 @@ public B clock(Clock clock) { /** * Sets project id. * - * @return the builder. + * @return the builder */ public B projectId(String projectId) { - this.projectId = - checkNotNull(projectId, "Project ID cannot be set to null. Leave unset for default."); + this.projectId = projectId; return self(); } /** * Sets service host. * - * @return the builder. + * @return the builder */ public B host(String host) { this.host = host; @@ -201,7 +245,7 @@ public B host(String host) { /** * Sets the transport factory. * - * @return the builder. + * @return the builder */ public B httpTransportFactory(HttpTransportFactory httpTransportFactory) { this.httpTransportFactory = httpTransportFactory; @@ -211,7 +255,7 @@ public B httpTransportFactory(HttpTransportFactory httpTransportFactory) { /** * Sets the service authentication credentials. * - * @return the builder. + * @return the builder */ public B authCredentials(AuthCredentials authCredentials) { this.authCredentials = authCredentials; @@ -220,9 +264,10 @@ public B authCredentials(AuthCredentials authCredentials) { /** * Sets configuration parameters for request retries. If no configuration is set - * {@link RetryParams#noRetries()} is used. + * {@link RetryParams#defaultInstance()} is used. To disable retries, supply + * {@link RetryParams#noRetries()} here. * - * @return the builder. + * @return the builder */ public B retryParams(RetryParams retryParams) { this.retryParams = retryParams; @@ -243,8 +288,8 @@ public B serviceRpcFactory(ServiceRpcFactory serviceRpcFa * Sets the timeout in milliseconds to establish a connection. * * @param connectTimeout connection timeout in milliseconds. 
0 for an infinite timeout, a - * negative number for the default value (20000). - * @return the builder. + * negative number for the default value (20000). + * @return the builder */ public B connectTimeout(int connectTimeout) { this.connectTimeout = connectTimeout; @@ -254,9 +299,9 @@ public B connectTimeout(int connectTimeout) { /** * Sets the timeout in milliseconds to read data from an established connection. * - * @param readTimeout read timeout in milliseconds. 0 for an infinite timeout, a - * negative number for the default value (20000). - * @return the builder. + * @param readTimeout read timeout in milliseconds. 0 for an infinite timeout, a negative number + * for the default value (20000). + * @return the builder */ public B readTimeout(int readTimeout) { this.readTimeout = readTimeout; @@ -264,21 +309,47 @@ public B readTimeout(int readTimeout) { } } - protected ServiceOptions(Builder builder) { - projectId = checkNotNull(builder.projectId != null ? builder.projectId : defaultProject()); + protected ServiceOptions(Class> serviceFactoryClass, + Class> rpcFactoryClass, + Builder builder) { + projectId = builder.projectId != null ? builder.projectId : defaultProject(); + if (projectIdRequired()) { + checkArgument( + projectId != null, + "A project ID is required for this service but could not be determined from the builder " + + "or the environment. Please set a project ID using the builder."); + } host = firstNonNull(builder.host, defaultHost()); - httpTransportFactory = - firstNonNull(builder.httpTransportFactory, DefaultHttpTransportFactory.INSTANCE); - authCredentials = firstNonNull(builder.authCredentials, defaultAuthCredentials()); - retryParams = builder.retryParams; - serviceRpcFactory = builder.serviceRpcFactory; + httpTransportFactory = firstNonNull(builder.httpTransportFactory, + getFromServiceLoader(HttpTransportFactory.class, DefaultHttpTransportFactory.INSTANCE)); + httpTransportFactoryClassName = httpTransportFactory.getClass().getName(); + authCredentials = + builder.authCredentials != null ? builder.authCredentials : defaultAuthCredentials(); + authCredentialsState = authCredentials != null ? authCredentials.capture() : null; + retryParams = firstNonNull(builder.retryParams, RetryParams.defaultInstance()); + serviceFactory = firstNonNull(builder.serviceFactory, + getFromServiceLoader(serviceFactoryClass, defaultServiceFactory())); + serviceFactoryClassName = serviceFactory.getClass().getName(); + serviceRpcFactory = firstNonNull(builder.serviceRpcFactory, + getFromServiceLoader(rpcFactoryClass, defaultRpcFactory())); + serviceRpcFactoryClassName = serviceRpcFactory.getClass().getName(); connectTimeout = builder.connectTimeout; readTimeout = builder.readTimeout; clock = firstNonNull(builder.clock, Clock.defaultClock()); } + /** + * Returns whether a service requires a project ID. This method may be overridden in + * service-specific Options objects. + * + * @return true if a project ID is required to use the service, false if not + */ + protected boolean projectIdRequired() { + return true; + } + private static AuthCredentials defaultAuthCredentials() { - // Consider App Engine. This will not be needed once issue #21 is fixed. + // Consider App Engine. if (appEngineAppId() != null) { try { return AuthCredentials.createForAppEngine(); @@ -290,16 +361,8 @@ private static AuthCredentials defaultAuthCredentials() { try { return AuthCredentials.createApplicationDefaults(); } catch (Exception ex) { - // fallback to old-style - } - - // Consider old-style Compute. 
This will not be needed once issue #21 is fixed. - try { - return AuthCredentials.createForComputeEngine(); - } catch (Exception ignore) { - // Maybe not on GCE + return null; } - return AuthCredentials.noCredentials(); } protected static String appEngineAppId() { @@ -319,6 +382,49 @@ protected String defaultProject() { } protected static String googleCloudProjectId() { + File configDir; + if (System.getenv().containsKey("CLOUDSDK_CONFIG")) { + configDir = new File(System.getenv("CLOUDSDK_CONFIG")); + } else if (isWindows() && System.getenv().containsKey("APPDATA")) { + configDir = new File(System.getenv("APPDATA"), "gcloud"); + } else { + configDir = new File(System.getProperty("user.home"), ".config/gcloud"); + } + FileReader fileReader = null; + try { + fileReader = new FileReader(new File(configDir, "configurations/config_default")); + } catch (FileNotFoundException newConfigFileNotFoundEx) { + try { + fileReader = new FileReader(new File(configDir, "properties")); + } catch (FileNotFoundException oldConfigFileNotFoundEx) { + // ignore + } + } + if (fileReader != null) { + try (BufferedReader reader = new BufferedReader(fileReader)) { + String line; + String section = null; + Pattern projectPattern = Pattern.compile("^project\\s*=\\s*(.*)$"); + Pattern sectionPattern = Pattern.compile("^\\[(.*)\\]$"); + while ((line = reader.readLine()) != null) { + if (line.isEmpty() || line.startsWith(";")) { + continue; + } + line = line.trim(); + Matcher matcher = sectionPattern.matcher(line); + if (matcher.matches()) { + section = matcher.group(1); + } else if (section == null || section.equals("core")) { + matcher = projectPattern.matcher(line); + if (matcher.matches()) { + return matcher.group(1); + } + } + } + } catch (IOException ex) { + // ignore + } + } try { URL url = new URL("http://metadata/computeMetadata/v1/project/project-id"); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); @@ -332,38 +438,6 @@ protected static String googleCloudProjectId() { } catch (IOException ignore) { // ignore } - File configDir; - if (System.getenv().containsKey("CLOUDSDK_CONFIG")) { - configDir = new File(System.getenv("CLOUDSDK_CONFIG")); - } else if (isWindows() && System.getenv().containsKey("APPDATA")) { - configDir = new File(System.getenv("APPDATA"), "gcloud"); - } else { - configDir = new File(System.getProperty("user.home"), ".config/gcloud"); - } - try (BufferedReader reader = - new BufferedReader(new FileReader(new File(configDir, "properties")))) { - String line; - String section = null; - Pattern projectPattern = Pattern.compile("^project\\s*=\\s*(.*)$"); - Pattern sectionPattern = Pattern.compile("^\\[(.*)\\]$"); - while ((line = reader.readLine()) != null) { - if (line.isEmpty() || line.startsWith(";")) { - continue; - } - line = line.trim(); - Matcher matcher = sectionPattern.matcher(line); - if (matcher.matches()) { - section = matcher.group(1); - } else if (section == null || section.equals("core")) { - matcher = projectPattern.matcher(line); - if (matcher.matches()) { - return matcher.group(1); - } - } - } - } catch (IOException ex) { - // ignore - } // return null if can't determine return null; } @@ -373,14 +447,14 @@ private static boolean isWindows() { } protected static String getAppEngineProjectId() { - // TODO(ozarov): An alternative to reflection would be to depend on AE api jar: - // http://mvnrepository.com/artifact/com.google.appengine/appengine-api-1.0-sdk/1.2.0 try { Class factoryClass = 
Class.forName("com.google.appengine.api.appidentity.AppIdentityServiceFactory"); + Class serviceClass = + Class.forName("com.google.appengine.api.appidentity.AppIdentityService"); Method method = factoryClass.getMethod("getAppIdentityService"); Object appIdentityService = method.invoke(null); - method = appIdentityService.getClass().getMethod("getServiceAccountName"); + method = serviceClass.getMethod("getServiceAccountName"); String serviceAccountName = (String) method.invoke(appIdentityService); int indexOfAtSign = serviceAccountName.indexOf('@'); return serviceAccountName.substring(0, indexOfAtSign); @@ -390,10 +464,26 @@ protected static String getAppEngineProjectId() { } } - protected abstract Set scopes(); + @SuppressWarnings("unchecked") + public ServiceT service() { + if (service == null) { + service = serviceFactory.create((OptionsT) this); + } + return service; + } + + @SuppressWarnings("unchecked") + public ServiceRpcT rpc() { + if (rpc == null) { + rpc = serviceRpcFactory.create((OptionsT) this); + } + return rpc; + } /** - * Returns the project id. + * Returns the project id. + * + * Return value can be null (for services that don't require a project id). */ public String projectId() { return projectId; @@ -421,18 +511,11 @@ public AuthCredentials authCredentials() { } /** - * Returns configuration parameters for request retries. By default requests are not retried: - * {@link RetryParams#noRetries()} is used. + * Returns configuration parameters for request retries. By default requests are retried: + * {@link RetryParams#defaultInstance()} is used. */ public RetryParams retryParams() { - return retryParams != null ? retryParams : RetryParams.noRetries(); - } - - /** - * Returns the factory for rpc services. - */ - public ServiceRpcFactory serviceRpcFactory() { - return serviceRpcFactory; + return retryParams; } /** @@ -440,13 +523,15 @@ public ServiceRpcFactory serviceRpcFactory() { * options. */ public HttpRequestInitializer httpRequestInitializer() { - HttpTransport httpTransport = httpTransportFactory.create(); - final HttpRequestInitializer baseRequestInitializer = - authCredentials().httpRequestInitializer(httpTransport, scopes()); + final HttpRequestInitializer delegate = authCredentials() != null + ? new HttpCredentialsAdapter(authCredentials().credentials().createScoped(scopes())) + : null; return new HttpRequestInitializer() { @Override public void initialize(HttpRequest httpRequest) throws IOException { - baseRequestInitializer.initialize(httpRequest); + if (delegate != null) { + delegate.initialize(httpRequest); + } if (connectTimeout >= 0) { httpRequest.setConnectTimeout(connectTimeout); } @@ -474,41 +559,84 @@ public int readTimeout() { } /** - * Returns the service's clock. Default time source uses {@link System#currentTimeMillis()} to - * get current time. + * Returns the service's clock. Default time source uses {@link System#currentTimeMillis()} to get + * current time. */ public Clock clock() { return clock; } + /** + * Returns the application's name as a string in the format {@code gcloud-java/[version]}. 
+ */ + public String applicationName() { + return APPLICATION_NAME; + } + protected int baseHashCode() { - return Objects.hash(projectId, host, httpTransportFactory, authCredentials, retryParams, - serviceRpcFactory, connectTimeout, readTimeout, clock); + return Objects.hash(projectId, host, httpTransportFactoryClassName, authCredentialsState, + retryParams, serviceFactoryClassName, serviceRpcFactoryClassName, connectTimeout, + readTimeout, clock); } - protected boolean baseEquals(ServiceOptions other) { + protected boolean baseEquals(ServiceOptions other) { return Objects.equals(projectId, other.projectId) && Objects.equals(host, other.host) - && Objects.equals(httpTransportFactory, other.httpTransportFactory) - && Objects.equals(authCredentials, other.authCredentials) + && Objects.equals(httpTransportFactoryClassName, other.httpTransportFactoryClassName) + && Objects.equals(authCredentialsState, other.authCredentialsState) && Objects.equals(retryParams, other.retryParams) - && Objects.equals(serviceRpcFactory, other.serviceRpcFactory) + && Objects.equals(serviceFactoryClassName, other.serviceFactoryClassName) + && Objects.equals(serviceRpcFactoryClassName, other.serviceRpcFactoryClassName) && Objects.equals(connectTimeout, other.connectTimeout) && Objects.equals(readTimeout, other.readTimeout) && Objects.equals(clock, clock); } - public abstract Builder toBuilder(); + private void readObject(ObjectInputStream input) throws IOException, ClassNotFoundException { + input.defaultReadObject(); + httpTransportFactory = newInstance(httpTransportFactoryClassName); + serviceFactory = newInstance(serviceFactoryClassName); + serviceRpcFactory = newInstance(serviceRpcFactoryClassName); + authCredentials = authCredentialsState != null ? authCredentialsState.restore() : null; + } + + @SuppressWarnings("unchecked") + private static T newInstance(String className) throws IOException, ClassNotFoundException { + try { + return (T) Class.forName(className).newInstance(); + } catch (InstantiationException | IllegalAccessException e) { + throw new IOException(e); + } + } - /** - * Creates a service RPC using a factory loaded by {@link ServiceLoader}. - */ - protected static - > - ServiceRpcT createRpc(OptionsT options, - Class> factoryClass) { - ServiceRpcFactory factory = - Iterables.getFirst(ServiceLoader.load(factoryClass), null); - return factory == null ? null : factory.create(options); + protected abstract > T defaultServiceFactory(); + + protected abstract > T defaultRpcFactory(); + + protected abstract Set scopes(); + + public abstract > B toBuilder(); + + private static T getFromServiceLoader(Class clazz, T defaultInstance) { + return Iterables.getFirst(ServiceLoader.load(clazz), defaultInstance); + } + + private static String getApplicationName() { + String version = null; + try { + Enumeration resources = + ServiceOptions.class.getClassLoader().getResources(JarFile.MANIFEST_NAME); + while (resources.hasMoreElements() && version == null) { + Manifest manifest = new Manifest(resources.nextElement().openStream()); + Attributes manifestAttributes = manifest.getMainAttributes(); + String artifactId = manifestAttributes.getValue(MANIFEST_ARTIFACT_ID_KEY); + if (artifactId != null && artifactId.equals(ARTIFACT_ID)) { + version = manifestAttributes.getValue(MANIFEST_VERSION_KEY); + } + } + } catch (IOException e) { + // ignore + } + return version != null ? 
APPLICATION_BASE_NAME + "/" + version : APPLICATION_BASE_NAME; } } diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java b/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java new file mode 100644 index 000000000000..e6f06e23dc04 --- /dev/null +++ b/gcloud-java-core/src/main/java/com/google/gcloud/WriteChannel.java @@ -0,0 +1,48 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import java.io.Closeable; +import java.nio.channels.WritableByteChannel; + +/** + * A channel for writing data to Google Cloud services. + * + *

Implementations of this class may further buffer data internally to reduce remote calls. + * Written data will only be visible after calling {@link #close()}. This interface implements + * {@link Restorable} to allow saving the writer's state to continue writing afterwards. + *

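+ *
+ * A minimal usage sketch (how a {@code WriteChannel} is obtained is service-specific, so the
+ * creation call below is an illustrative placeholder only):
+ *
+ *   try (WriteChannel writer = ...) {   // obtained from a service-specific writer method
+ *     writer.chunkSize(1024 * 1024);    // buffer roughly 1MB before each remote call
+ *     writer.write(ByteBuffer.wrap(content));
+ *   }                                   // close() flushes remaining data and makes it visible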
+ */ +public interface WriteChannel extends WritableByteChannel, Closeable, Restorable { + + /** + * Sets the minimum size that will be written by a single RPC. + * Written data will be buffered and only flushed upon reaching this size or closing the channel. + */ + void chunkSize(int chunkSize); + + /** + * Captures the write channel state so that it can be saved and restored afterwards. The original + * {@code WriteChannel} and the restored one should not both be used. Closing one channel + * causes the other channel to close; subsequent writes will fail. + * + * @return a {@link RestorableState} object that contains the write channel state and can restore + * it afterwards. + */ + @Override + RestorableState capture(); +} diff --git a/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java b/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java index 89e08cda9eda..d19f6047e4b2 100644 --- a/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java +++ b/gcloud-java-core/src/main/java/com/google/gcloud/spi/ServiceRpcFactory.java @@ -18,16 +18,14 @@ import com.google.gcloud.ServiceOptions; -import java.io.Serializable; - /** * A base interface for all service RPC factories. + * + * Implementation must provide a public no-arg constructor. * Loading of a factory implementation is done via {@link java.util.ServiceLoader}. */ -public interface ServiceRpcFactory< - ServiceRpcT, - OptionsT extends ServiceOptions> - extends Serializable { +@SuppressWarnings("rawtypes") +public interface ServiceRpcFactory { ServiceRpcT create(OptionsT options); } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java new file mode 100644 index 000000000000..e3c6abb7d1ee --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseServiceExceptionTest.java @@ -0,0 +1,155 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud; + +import static com.google.gcloud.BaseServiceException.UNKNOWN_CODE; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.common.collect.ImmutableSet; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; +import java.util.Set; + +/** + * Tests for {@link BaseServiceException}. 
+ */ +public class BaseServiceExceptionTest { + + private static final int CODE = 1; + private static final int CODE_NO_REASON = 2; + private static final String MESSAGE = "some message"; + private static final String REASON = "some reason"; + private static final boolean RETRYABLE = true; + private static final boolean IDEMPOTENT = true; + private static class CustomServiceException extends BaseServiceException { + + private static final long serialVersionUID = -195251309124875103L; + + public CustomServiceException(int code, String message, String reason, boolean idempotent) { + super(code, message, reason, idempotent); + } + + @Override + protected Set retryableErrors() { + return ImmutableSet.of(new Error(CODE, REASON), new Error(null, REASON), + new Error(CODE_NO_REASON, null)); + } + } + + @Test + public void testBaseServiceException() { + BaseServiceException serviceException = new BaseServiceException(CODE, MESSAGE, REASON, + IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertNull(serviceException.getCause()); + + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertNull(serviceException.getCause()); + + Exception cause = new RuntimeException(); + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT, cause); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + assertEquals(cause, serviceException.getCause()); + + serviceException = new BaseServiceException(CODE, MESSAGE, REASON, false, cause); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertFalse(serviceException.retryable()); + assertFalse(serviceException.idempotent()); + assertEquals(cause, serviceException.getCause()); + + IOException exception = new SocketTimeoutException(); + serviceException = new BaseServiceException(exception, true); + assertTrue(serviceException.retryable()); + assertTrue(serviceException.idempotent()); + assertEquals(exception, serviceException.getCause()); + + GoogleJsonError error = new GoogleJsonError(); + error.setCode(CODE); + error.setMessage(MESSAGE); + serviceException = new BaseServiceException(error, true); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertFalse(serviceException.retryable()); + assertTrue(serviceException.idempotent()); + + serviceException = new CustomServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + + serviceException = new CustomServiceException(CODE_NO_REASON, MESSAGE, null, IDEMPOTENT); + 
assertEquals(CODE_NO_REASON, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertNull(serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + + serviceException = new CustomServiceException(UNKNOWN_CODE, MESSAGE, REASON, IDEMPOTENT); + assertEquals(UNKNOWN_CODE, serviceException.code()); + assertEquals(MESSAGE, serviceException.getMessage()); + assertEquals(REASON, serviceException.reason()); + assertEquals(RETRYABLE, serviceException.retryable()); + assertEquals(IDEMPOTENT, serviceException.idempotent()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + BaseServiceException cause = new BaseServiceException(CODE, MESSAGE, REASON, IDEMPOTENT); + RetryHelper.RetryHelperException exceptionMock = + createMock(RetryHelper.RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + BaseServiceException.translateAndPropagateIfPossible(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(CODE, ex.code()); + assertEquals(MESSAGE, ex.getMessage()); + assertFalse(ex.retryable()); + assertEquals(IDEMPOTENT, ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java new file mode 100644 index 000000000000..e49a17b019e0 --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/BaseWriteChannelTest.java @@ -0,0 +1,144 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud; + +import static junit.framework.TestCase.assertFalse; +import static junit.framework.TestCase.assertTrue; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import com.google.gcloud.spi.ServiceRpcFactory; + +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.IOException; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Random; + +public class BaseWriteChannelTest { + + private abstract static class CustomService implements Service {} + private abstract static class CustomServiceOptions + extends ServiceOptions { + + private static final long serialVersionUID = 3302358029307467197L; + + protected CustomServiceOptions( + Class> serviceFactoryClass, + Class> rpcFactoryClass, + Builder builder) { + super(serviceFactoryClass, rpcFactoryClass, builder); + } + } + + private static final Serializable ENTITY = 42L; + private static final String UPLOAD_ID = "uploadId"; + private static final byte[] CONTENT = {0xD, 0xE, 0xA, 0xD}; + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private static final Random RANDOM = new Random(); + private static BaseWriteChannel channel; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setUp() { + channel = new BaseWriteChannel(null, ENTITY, UPLOAD_ID) { + @Override + public RestorableState capture() { + return null; + } + + @Override + protected void flushBuffer(int length, boolean last) {} + + @Override + protected BaseState.Builder stateBuilder() { + return null; + } + }; + } + + @Test + public void testConstructor() throws IOException { + assertEquals(null, channel.options()); + assertEquals(ENTITY, channel.entity()); + assertEquals(0, channel.position()); + assertEquals(UPLOAD_ID, channel.uploadId()); + assertEquals(0, channel.limit()); + assertTrue(channel.isOpen()); + assertArrayEquals(new byte[0], channel.buffer()); + assertEquals(DEFAULT_CHUNK_SIZE, channel.chunkSize()); + } + + @Test + public void testClose() throws IOException { + channel.close(); + assertFalse(channel.isOpen()); + assertNull(channel.buffer()); + } + + @Test + public void testValidateOpen() throws IOException { + channel.close(); + thrown.expect(IOException.class); + thrown.expectMessage("stream is closed"); + channel.write(ByteBuffer.allocate(42)); + } + + @Test + public void testChunkSize() throws IOException { + channel.chunkSize(42); + assertEquals(MIN_CHUNK_SIZE, channel.chunkSize()); + channel.chunkSize(2 * MIN_CHUNK_SIZE); + assertEquals(2 * MIN_CHUNK_SIZE, channel.chunkSize()); + channel.chunkSize(512 * 1025); + assertEquals(2 * MIN_CHUNK_SIZE, channel.chunkSize()); + } + + @Test + public void testWrite() throws IOException { + channel.write(ByteBuffer.wrap(CONTENT)); + assertEquals(CONTENT.length, channel.limit()); + assertEquals(DEFAULT_CHUNK_SIZE, channel.buffer().length); + assertArrayEquals(Arrays.copyOf(CONTENT, DEFAULT_CHUNK_SIZE), channel.buffer()); + } + + @Test + public void testWriteAndFlush() throws IOException { + ByteBuffer content = randomBuffer(DEFAULT_CHUNK_SIZE + 1); + channel.write(content); + assertEquals(DEFAULT_CHUNK_SIZE, channel.position()); + assertEquals(1, channel.limit()); + byte[] newContent = new byte[DEFAULT_CHUNK_SIZE]; + newContent[0] = 
content.get(DEFAULT_CHUNK_SIZE); + assertArrayEquals(newContent, channel.buffer()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java index 5ce05ad900a8..cedc995ddbd0 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/ExceptionHandlerTest.java @@ -23,9 +23,9 @@ import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.ExceptionHandler.Interceptor.RetryResult; -import org.junit.rules.ExpectedException; import org.junit.Rule; import org.junit.Test; +import org.junit.rules.ExpectedException; import java.io.FileNotFoundException; import java.io.IOException; @@ -82,7 +82,7 @@ public Object call() throws Error { } // using default exception handler (retry upon any non-runtime exceptions) - ExceptionHandler handler = ExceptionHandler.getDefaultInstance(); + ExceptionHandler handler = ExceptionHandler.defaultInstance(); assertValidCallable(new A(), handler); assertValidCallable(new B(), handler); assertValidCallable(new C(), handler); diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java new file mode 100644 index 000000000000..4389171fb49c --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/PageImplTest.java @@ -0,0 +1,61 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +public class PageImplTest { + + private static final ImmutableList VALUES = ImmutableList.of("1", "2"); + private static final ImmutableList NEXT_VALUES = ImmutableList.of("3", "4"); + private static final ImmutableList ALL_VALUES = ImmutableList.builder() + .addAll(VALUES) + .addAll(NEXT_VALUES) + .build(); + + @Test + public void testPage() { + final PageImpl nextResult = new PageImpl<>(null, "c", NEXT_VALUES); + PageImpl.NextPageFetcher fetcher = new PageImpl.NextPageFetcher() { + @Override + public PageImpl nextPage() { + return nextResult; + } + }; + PageImpl result = new PageImpl<>(fetcher, "c", VALUES); + assertEquals(nextResult, result.nextPage()); + assertEquals("c", result.nextPageCursor()); + assertEquals(VALUES, result.values()); + } + + @Test + public void testIterateAll() { + final PageImpl nextResult = new PageImpl<>(null, "c", NEXT_VALUES); + PageImpl.NextPageFetcher fetcher = new PageImpl.NextPageFetcher() { + @Override + public PageImpl nextPage() { + return nextResult; + } + }; + PageImpl result = new PageImpl<>(fetcher, "c", VALUES); + assertEquals(ALL_VALUES, ImmutableList.copyOf(result.iterateAll())); + } +} diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java index dfd933bcae46..9a7cc2104f4a 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/RetryHelperTest.java @@ -118,13 +118,13 @@ public void testTriesAtLeastMinTimes() { @Override public Integer call() throws IOException { timesCalled++; assertEquals(timesCalled, RetryHelper.getContext().getAttemptNumber()); - assertEquals(10, RetryHelper.getContext().getRetryParams().getRetryMaxAttempts()); + assertEquals(10, RetryHelper.getContext().getRetryParams().retryMaxAttempts()); if (timesCalled <= timesToFail) { throw new IOException(); } return timesCalled; } - }, params, ExceptionHandler.getDefaultInstance()); + }, params, ExceptionHandler.defaultInstance()); assertEquals(timesToFail + 1, attempted); assertNull(RetryHelper.getContext()); } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java index d1d5e3c076d8..eae44693929b 100644 --- a/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java +++ b/gcloud-java-core/src/test/java/com/google/gcloud/RetryParamsTest.java @@ -41,15 +41,15 @@ public class RetryParamsTest { @Test public void testDefaults() { - RetryParams params1 = RetryParams.getDefaultInstance(); + RetryParams params1 = RetryParams.defaultInstance(); RetryParams params2 = RetryParams.builder().build(); for (RetryParams params : Arrays.asList(params1, params2)) { - assertEquals(DEFAULT_INITIAL_RETRY_DELAY_MILLIS, params.getInitialRetryDelayMillis()); - assertEquals(DEFAULT_MAX_RETRY_DELAY_MILLIS, params.getMaxRetryDelayMillis()); - assertEquals(DEFAULT_RETRY_DELAY_BACKOFF_FACTOR, params.getRetryDelayBackoffFactor(), 0); - assertEquals(DEFAULT_RETRY_MAX_ATTEMPTS, params.getRetryMaxAttempts()); - assertEquals(DEFAULT_RETRY_MIN_ATTEMPTS, params.getRetryMinAttempts()); - assertEquals(DEFAULT_TOTAL_RETRY_PERIOD_MILLIS, params.getTotalRetryPeriodMillis()); + assertEquals(DEFAULT_INITIAL_RETRY_DELAY_MILLIS, params.initialRetryDelayMillis()); + 
assertEquals(DEFAULT_MAX_RETRY_DELAY_MILLIS, params.maxRetryDelayMillis()); + assertEquals(DEFAULT_RETRY_DELAY_BACKOFF_FACTOR, params.retryDelayBackoffFactor(), 0); + assertEquals(DEFAULT_RETRY_MAX_ATTEMPTS, params.retryMaxAttempts()); + assertEquals(DEFAULT_RETRY_MIN_ATTEMPTS, params.retryMinAttempts()); + assertEquals(DEFAULT_TOTAL_RETRY_PERIOD_MILLIS, params.totalRetryPeriodMillis()); } } @@ -65,12 +65,12 @@ public void testSetAndCopy() { RetryParams params1 = builder.build(); RetryParams params2 = new RetryParams.Builder(params1).build(); for (RetryParams params : Arrays.asList(params1, params2)) { - assertEquals(101, params.getInitialRetryDelayMillis()); - assertEquals(102, params.getMaxRetryDelayMillis()); - assertEquals(103, params.getRetryDelayBackoffFactor(), 0); - assertEquals(107, params.getRetryMinAttempts()); - assertEquals(108, params.getRetryMaxAttempts()); - assertEquals(109, params.getTotalRetryPeriodMillis()); + assertEquals(101, params.initialRetryDelayMillis()); + assertEquals(102, params.maxRetryDelayMillis()); + assertEquals(103, params.retryDelayBackoffFactor(), 0); + assertEquals(107, params.retryMinAttempts()); + assertEquals(108, params.retryMaxAttempts()); + assertEquals(109, params.totalRetryPeriodMillis()); } } @@ -79,19 +79,19 @@ public void testBadSettings() { RetryParams.Builder builder = RetryParams.builder(); builder.initialRetryDelayMillis(-1); builder = assertFailure(builder); - builder.maxRetryDelayMillis(RetryParams.getDefaultInstance().getInitialRetryDelayMillis() - 1); + builder.maxRetryDelayMillis(RetryParams.defaultInstance().initialRetryDelayMillis() - 1); builder = assertFailure(builder); builder.retryDelayBackoffFactor(-1); builder = assertFailure(builder); builder.retryMinAttempts(-1); builder = assertFailure(builder); - builder.retryMaxAttempts(RetryParams.getDefaultInstance().getRetryMinAttempts() - 1); + builder.retryMaxAttempts(RetryParams.defaultInstance().retryMinAttempts() - 1); builder = assertFailure(builder); builder.totalRetryPeriodMillis(-1); builder = assertFailure(builder); // verify that it is OK for min and max to be equal - builder.retryMaxAttempts(RetryParams.getDefaultInstance().getRetryMinAttempts()); - builder.maxRetryDelayMillis(RetryParams.getDefaultInstance().getInitialRetryDelayMillis()); + builder.retryMaxAttempts(RetryParams.defaultInstance().retryMinAttempts()); + builder.maxRetryDelayMillis(RetryParams.defaultInstance().initialRetryDelayMillis()); builder.build(); } diff --git a/gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java b/gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java new file mode 100644 index 000000000000..d0e3db2d2a55 --- /dev/null +++ b/gcloud-java-core/src/test/java/com/google/gcloud/ServiceOptionsTest.java @@ -0,0 +1,241 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.gcloud.ServiceOptions.Clock; +import com.google.gcloud.ServiceOptions.DefaultHttpTransportFactory; +import com.google.gcloud.ServiceOptions.HttpTransportFactory; +import com.google.gcloud.spi.ServiceRpcFactory; + +import org.easymock.EasyMock; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.JUnit4; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Set; + +@RunWith(JUnit4.class) +public class ServiceOptionsTest { + private static final String JSON_KEY = + "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\"\n" + + "}"; + private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); + private static AuthCredentials authCredentials; + static { + try { + authCredentials = AuthCredentials.createForJson(JSON_KEY_STREAM); + } catch (IOException e) { + fail("Couldn't create fake JSON credentials."); + } + } + private static final HttpTransportFactory MOCK_HTTP_TRANSPORT_FACTORY = + EasyMock.createMock(HttpTransportFactory.class); + private static final Clock TEST_CLOCK = new TestClock(); + private static final 
TestServiceOptions OPTIONS = + TestServiceOptions.builder() + .authCredentials(authCredentials) + .clock(TEST_CLOCK) + .connectTimeout(1234) + .host("host") + .httpTransportFactory(MOCK_HTTP_TRANSPORT_FACTORY) + .projectId("project-id") + .readTimeout(5678) + .retryParams(RetryParams.noRetries()) + .build(); + private static final TestServiceOptions DEFAULT_OPTIONS = + TestServiceOptions.builder().projectId("project-id").build(); + private static final TestServiceOptions OPTIONS_COPY = OPTIONS.toBuilder().build(); + + private static class TestClock extends Clock { + @Override + public long millis() { + return 123456789L; + } + } + + private interface TestService extends Service {} + + private static class TestServiceImpl + extends BaseService implements TestService { + private TestServiceImpl(TestServiceOptions options) { + super(options); + } + } + + private interface TestServiceFactory extends ServiceFactory {} + + private static class DefaultTestServiceFactory implements TestServiceFactory { + private static final TestServiceFactory INSTANCE = new DefaultTestServiceFactory(); + + @Override + public TestService create(TestServiceOptions options) { + return new TestServiceImpl(options); + } + } + + private interface TestServiceRpcFactory + extends ServiceRpcFactory {} + + private static class DefaultTestServiceRpcFactory implements TestServiceRpcFactory { + private static final TestServiceRpcFactory INSTANCE = new DefaultTestServiceRpcFactory(); + + @Override + public TestServiceRpc create(TestServiceOptions options) { + return new DefaultTestServiceRpc(options); + } + } + + private interface TestServiceRpc {} + + private static class DefaultTestServiceRpc implements TestServiceRpc { + DefaultTestServiceRpc(TestServiceOptions options) {} + } + + private static class TestServiceOptions + extends ServiceOptions { + private static class Builder + extends ServiceOptions.Builder { + private Builder() {} + + private Builder(TestServiceOptions options) { + super(options); + } + + @Override + protected TestServiceOptions build() { + return new TestServiceOptions(this); + } + } + + private TestServiceOptions(Builder builder) { + super(TestServiceFactory.class, TestServiceRpcFactory.class, builder); + } + + @Override + protected TestServiceFactory defaultServiceFactory() { + return DefaultTestServiceFactory.INSTANCE; + } + + @Override + protected TestServiceRpcFactory defaultRpcFactory() { + return DefaultTestServiceRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return null; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + private static Builder builder() { + return new Builder(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof TestServiceOptions && baseEquals((TestServiceOptions) obj); + } + + @Override + public int hashCode() { + return baseHashCode(); + } + } + + @Test + public void testBuilder() { + assertSame(authCredentials, OPTIONS.authCredentials()); + assertSame(TEST_CLOCK, OPTIONS.clock()); + assertEquals(1234, OPTIONS.connectTimeout()); + assertEquals("host", OPTIONS.host()); + assertSame(MOCK_HTTP_TRANSPORT_FACTORY, OPTIONS.httpTransportFactory()); + assertEquals("project-id", OPTIONS.projectId()); + assertEquals(5678, OPTIONS.readTimeout()); + assertSame(RetryParams.noRetries(), OPTIONS.retryParams()); + + assertSame(Clock.defaultClock(), DEFAULT_OPTIONS.clock()); + assertEquals(-1, DEFAULT_OPTIONS.connectTimeout()); + assertEquals("https://www.googleapis.com", DEFAULT_OPTIONS.host()); + 
+    assertTrue(DEFAULT_OPTIONS.httpTransportFactory() instanceof DefaultHttpTransportFactory);
+    assertEquals(-1, DEFAULT_OPTIONS.readTimeout());
+    assertSame(RetryParams.defaultInstance(), DEFAULT_OPTIONS.retryParams());
+  }
+
+  @Test
+  public void testGetProjectIdRequired() {
+    assertTrue(OPTIONS.projectIdRequired());
+  }
+
+  @Test
+  public void testService() {
+    assertTrue(OPTIONS.service() instanceof TestServiceImpl);
+  }
+
+  @Test
+  public void testRpc() {
+    assertTrue(OPTIONS.rpc() instanceof DefaultTestServiceRpc);
+  }
+
+  @Test
+  public void testBaseEquals() {
+    assertEquals(OPTIONS, OPTIONS_COPY);
+    assertNotEquals(DEFAULT_OPTIONS, OPTIONS);
+  }
+
+  @Test
+  public void testBaseHashCode() {
+    assertEquals(OPTIONS.hashCode(), OPTIONS_COPY.hashCode());
+    assertNotEquals(DEFAULT_OPTIONS.hashCode(), OPTIONS.hashCode());
+  }
+}
diff --git a/gcloud-java-datastore/README.md b/gcloud-java-datastore/README.md
index f0dca1777fe8..7eae00f2ad3f 100644
--- a/gcloud-java-datastore/README.md
+++ b/gcloud-java-datastore/README.md
@@ -15,14 +15,22 @@ Java idiomatic client for [Google Cloud Datastore] (https://cloud.google.com/dat
 Quickstart
 ----------
-Add this to your pom.xml file
+If you are using Maven, add this to your pom.xml file
 ```xml
 <dependency>
   <groupId>com.google.gcloud</groupId>
   <artifactId>gcloud-java-datastore</artifactId>
-  <version>0.0.10</version>
+  <version>0.1.3</version>
 </dependency>
 ```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.gcloud:gcloud-java-datastore:0.1.3'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.gcloud" % "gcloud-java-datastore" % "0.1.3"
+```
 
 Example Application
 --------------------
@@ -36,7 +44,7 @@ See the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#auth
 About Google Cloud Datastore
 ----------------------------
 
-Google [Cloud Datastore][cloud-datastore] is a fully managed, schemaless database for
+Google [Cloud Datastore][cloud-datastore-docs] is a fully managed, schemaless database for
 storing non-relational data. Cloud Datastore automatically scales with your
 users and supports ACID transactions, high availability of reads and writes,
 strong consistency for reads and ancestor queries, and eventual
@@ -48,37 +56,151 @@ Cloud Datastore for your project.
 
 See the ``gcloud-java`` API [datastore documentation][datastore-api] to learn how to interact
 with the Cloud Datastore using this Client Library.
 
-Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and a project ID if running this snippet elsewhere.
+Getting Started
+---------------
+#### Prerequisites
+For this tutorial, you will need a [Google Developers Console](https://console.developers.google.com/) project with the Datastore API enabled. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up. You will also need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands on the command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`.
+
+#### Installation and setup
+You'll need to obtain the `gcloud-java-datastore` library. See the [Quickstart](#quickstart) section to add `gcloud-java-datastore` as a dependency in your code.
+
+#### Creating an authorized service object
+To make authenticated requests to Google Cloud Datastore, you must create a service object with credentials. You can then make API calls by calling methods on the Datastore service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). These credentials are automatically inferred from your environment, so you only need the following code to create your service object:
 
 ```java
 import com.google.gcloud.datastore.Datastore;
-import com.google.gcloud.datastore.DatastoreFactory;
 import com.google.gcloud.datastore.DatastoreOptions;
-import com.google.gcloud.datastore.DateTime;
+
+Datastore datastore = DatastoreOptions.defaultInstance().service();
+```
+
+For other authentication options, see the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page.
+
+#### Storing data
+Objects in Datastore are known as entities. Entities are grouped by "kind" and have keys for easy access. In this code snippet, we will create a new entity representing a person, keyed by the person's email address. First, add the following imports at the top of your file:
+
+```java
 import com.google.gcloud.datastore.Entity;
 import com.google.gcloud.datastore.Key;
 import com.google.gcloud.datastore.KeyFactory;
+```
+
+Then add the following code to put an entity in Datastore.
+
+```java
+KeyFactory keyFactory = datastore.newKeyFactory().kind("Person");
+Key key = keyFactory.newKey("john.doe@gmail.com");
+Entity entity = Entity.builder(key)
+    .set("name", "John Doe")
+    .set("age", 51)
+    .set("favorite_food", "pizza")
+    .build();
+datastore.put(entity);
+```
+
+Later, if you want to get this entity back, add the following to your code:
+
+```java
+Entity johnEntity = datastore.get(key);
+```
+
+#### Running a query
+In addition to retrieving entities by their keys, you can perform queries to retrieve entities by the values of their properties. A typical query includes an entity kind, filters to select entities with matching values, and sort orders to sequence the results. `gcloud-java-datastore` supports two types of queries: `StructuredQuery` (which lets you build up the elements of a query programmatically) and `GqlQuery` (which takes a query written in [GQL syntax](https://cloud.google.com/datastore/docs/apis/gql/gql_reference) as a string). In this tutorial, we will use a simple `StructuredQuery`.
+
+Suppose that you've added more people to Datastore, and now you want to find all people whose favorite food is pizza.
+Import the following:
 
-Datastore datastore = DatastoreFactory.instance().get(DatastoreOptions.getDefaultInstance());
-KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND);
-Key key = keyFactory.newKey(keyName);
-Entity entity = datastore.get(key);
-if (entity == null) {
-  entity = Entity.builder(key)
-      .set("name", "John Do")
-      .set("age", 30)
-      .set("access_time", DateTime.now())
-      .build();
-  datastore.put(entity);
-} else {
-  System.out.println("Updating access_time for " + entity.getString("name"));
-  entity = Entity.builder(entity)
-      .set("access_time", DateTime.now())
-      .build();
-  datastore.update(entity);
+```java
+import com.google.gcloud.datastore.Query;
+import com.google.gcloud.datastore.QueryResults;
+import com.google.gcloud.datastore.StructuredQuery;
+import com.google.gcloud.datastore.StructuredQuery.PropertyFilter;
+```
+
+Then add the following code to your program:
+
+```java
+Query<Entity> query = Query.entityQueryBuilder()
+    .kind("Person")
+    .filter(PropertyFilter.eq("favorite_food", "pizza"))
+    .build();
+QueryResults<Entity> results = datastore.run(query);
+while (results.hasNext()) {
+  Entity currentEntity = results.next();
+  System.out.println(currentEntity.getString("name") + ", you're invited to a pizza party!");
 }
 ```
+
+Cloud Datastore relies on indexing to run queries. Indexing is turned on by default for most types of properties. To read more about indexing, see the [Cloud Datastore Index Configuration documentation](https://cloud.google.com/datastore/docs/tools/indexconfig).
+
+#### Complete source code
+
+Here we put together all the code shown above into one program. This program assumes that you are running on Compute Engine or from your own desktop. To run this example on App Engine, move this code to your application's servlet class and print the query output to the web page instead of `System.out`.
+
+```java
+import com.google.gcloud.datastore.Datastore;
+import com.google.gcloud.datastore.DatastoreOptions;
+import com.google.gcloud.datastore.Entity;
+import com.google.gcloud.datastore.Key;
+import com.google.gcloud.datastore.KeyFactory;
+import com.google.gcloud.datastore.Query;
+import com.google.gcloud.datastore.QueryResults;
+import com.google.gcloud.datastore.StructuredQuery;
+import com.google.gcloud.datastore.StructuredQuery.PropertyFilter;
+
+public class GcloudDatastoreExample {
+
+  public static void main(String[] args) {
+    // Create datastore service object.
+    // By default, credentials are inferred from the runtime environment.
+    Datastore datastore = DatastoreOptions.defaultInstance().service();
+
+    // Add an entity to Datastore
+    KeyFactory keyFactory = datastore.newKeyFactory().kind("Person");
+    Key key = keyFactory.newKey("john.doe@gmail.com");
+    Entity entity = Entity.builder(key)
+        .set("name", "John Doe")
+        .set("age", 51)
+        .set("favorite_food", "pizza")
+        .build();
+    datastore.put(entity);
+
+    // Get an entity from Datastore
+    Entity johnEntity = datastore.get(key);
+
+    // Add a couple more entities to make the query results more interesting
+    Key janeKey = keyFactory.newKey("jane.doe@gmail.com");
+    Entity janeEntity = Entity.builder(janeKey)
+        .set("name", "Jane Doe")
+        .set("age", 44)
+        .set("favorite_food", "pizza")
+        .build();
+    Key joeKey = keyFactory.newKey("joe.shmoe@gmail.com");
+    Entity joeEntity = Entity.builder(joeKey)
+        .set("name", "Joe Shmoe")
+        .set("age", 27)
+        .set("favorite_food", "sushi")
+        .build();
+    datastore.put(janeEntity, joeEntity);
+
+    // Run a query
+    Query<Entity> query = Query.entityQueryBuilder()
+        .kind("Person")
+        .filter(PropertyFilter.eq("favorite_food", "pizza"))
+        .build();
+    QueryResults<Entity> results = datastore.run(query);
+    while (results.hasNext()) {
+      Entity currentEntity = results.next();
+      System.out.println(currentEntity.getString("name") + ", you're invited to a pizza party!");
+    }
+  }
+}
+```
+
+Troubleshooting
+---------------
+
+To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
+
 Java Versions
 -------------
@@ -105,7 +227,9 @@ Contributing
 ------------
 
 Contributions to this library are always welcome and highly encouraged.
 
-See [CONTRIBUTING] for more information on how to get started.
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
+
+Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.
 
 License
 -------
@@ -114,11 +238,10 @@ Apache 2.0 - See [LICENSE] for more information.
[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-datastore [cloud-platform]: https://cloud.google.com/ -[cloud-datastore]: https://cloud.google.com/datastore/docs [cloud-datastore-docs]: https://cloud.google.com/datastore/docs [cloud-datastore-activation]: https://cloud.google.com/datastore/docs/activate [datastore-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/datastore/package-summary.html - diff --git a/gcloud-java-datastore/pom.xml b/gcloud-java-datastore/pom.xml index 1b4f9b4ab6bd..b33f66a19682 100644 --- a/gcloud-java-datastore/pom.xml +++ b/gcloud-java-datastore/pom.xml @@ -11,7 +11,7 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.4-SNAPSHOT @@ -23,6 +23,9 @@ + + gcloud-java-datastore + ${project.groupId} @@ -44,6 +47,12 @@ google-api-services-datastore-protobuf v1beta2-rev1-2.1.2 compile + + + com.google.api-client + google-api-client + + junit diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java index 1846c2a0e7ae..cee0c972e663 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseEntity.java @@ -21,8 +21,8 @@ import static com.google.gcloud.datastore.DateTimeValue.of; import static com.google.gcloud.datastore.DoubleValue.of; import static com.google.gcloud.datastore.EntityValue.of; -import static com.google.gcloud.datastore.LatLngValue.of; import static com.google.gcloud.datastore.KeyValue.of; +import static com.google.gcloud.datastore.LatLngValue.of; import static com.google.gcloud.datastore.ListValue.of; import static com.google.gcloud.datastore.LongValue.of; import static com.google.gcloud.datastore.NullValue.of; @@ -48,7 +48,7 @@ * @see Google Cloud Datastore * Entities, Properties, and Keys */ -public abstract class BaseEntity +public abstract class BaseEntity extends Serializable { private static final long serialVersionUID = 8175618724683792766L; @@ -91,7 +91,7 @@ private B self() { } @SuppressWarnings("unchecked") - protected B fill(com.google.datastore.v1beta3.Entity entityPb) { + B fill(com.google.datastore.v1beta3.Entity entityPb) { Map> copiedProperties = Maps.newHashMap(); for (Map.Entry entry : entityPb.getProperties().entrySet()) { @@ -250,7 +250,7 @@ public boolean contains(String name) { /** * Returns the {@link Value} for the given property {@code name}. * - * @throws DatastoreException if not such property. + * @throws DatastoreException if not such property */ public > V getValue(String name) { @SuppressWarnings("unchecked") @@ -264,7 +264,7 @@ public > V getValue(String name) { /** * Returns true if property is an instance of NullValue. * - * @throws DatastoreException if not such property. + * @throws DatastoreException if not such property */ public boolean isNull(String name) { return getValue(name) instanceof NullValue; @@ -274,8 +274,8 @@ public boolean isNull(String name) { /** * Returns the property value as a string. * - * @throws DatastoreException if not such property. 
- * @throws ClassCastException if value is not a string. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a string */ @SuppressWarnings("unchecked") public String getString(String name) { @@ -285,8 +285,8 @@ public String getString(String name) { /** * Returns the property value as long. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a long. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a long */ @SuppressWarnings("unchecked") public long getLong(String name) { @@ -296,8 +296,8 @@ public long getLong(String name) { /** * Returns the property value as a double. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a double. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a double */ @SuppressWarnings("unchecked") public double getDouble(String name) { @@ -307,8 +307,8 @@ public double getDouble(String name) { /** * Returns the property value as a boolean. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a boolean. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a boolean */ @SuppressWarnings("unchecked") public boolean getBoolean(String name) { @@ -318,8 +318,8 @@ public boolean getBoolean(String name) { /** * Returns the property value as a DateTime. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a DateTime. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a DateTime */ @SuppressWarnings("unchecked") public DateTime getDateTime(String name) { @@ -340,8 +340,8 @@ public LatLng getLatLng(String name) { /** * Returns the property value as a Key. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a Key. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a Key */ @SuppressWarnings("unchecked") public Key getKey(String name) { @@ -351,8 +351,8 @@ public Key getKey(String name) { /** * Returns the property value as an entity. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not an entity. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not an entity */ @SuppressWarnings("unchecked") public FullEntity getEntity(String name) { @@ -362,8 +362,8 @@ public FullEntity getEntity(String name) { /** * Returns the property value as a list of values. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a list of values. + * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a list of values */ @SuppressWarnings("unchecked") public List> getList(String name) { @@ -373,8 +373,8 @@ public List> getList(String name) { /** * Returns the property value as a blob. * - * @throws DatastoreException if not such property. - * @throws ClassCastException if value is not a blob. 
+ * @throws DatastoreException if not such property + * @throws ClassCastException if value is not a blob */ @SuppressWarnings("unchecked") public Blob getBlob(String name) { @@ -393,7 +393,7 @@ ImmutableSortedMap> properties() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { Builder builder = emptyBuilder(); builder.fill(com.google.datastore.v1beta3.Entity.parseFrom(bytesPb)); return builder.build(); @@ -402,8 +402,8 @@ protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { protected abstract Builder emptyBuilder(); @Override - protected final com.google.datastore.v1beta3.Entity toPb() { - com.google.datastore.v1beta3.Entity.Builder entityPb = + final com.google.datastore.v1beta3.Entity toPb() { + com.google.datastore.v1beta3.Entity.Builder entityPb = com.google.datastore.v1beta3.Entity.newBuilder(); Map propertiesPb = entityPb.getMutableProperties(); for (Map.Entry> entry : properties.entrySet()) { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java index 2e6264af9927..f01c844f8350 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/BaseKey.java @@ -30,7 +30,7 @@ /** * Base class for keys. */ -abstract class BaseKey extends Serializable { +public abstract class BaseKey extends Serializable { private static final long serialVersionUID = -4671243265877410635L; @@ -38,7 +38,12 @@ abstract class BaseKey extends Serializable { private final transient String namespace; private final transient ImmutableList path; - abstract static class Builder> { + /** + * Base class for key builders. + * + * @param the key builder. + */ + protected abstract static class Builder> { String projectId = ""; String namespace = ""; @@ -171,9 +176,9 @@ public boolean equals(Object obj) { } @Override - protected com.google.datastore.v1beta3.Key toPb() { + com.google.datastore.v1beta3.Key toPb() { com.google.datastore.v1beta3.Key.Builder keyPb = com.google.datastore.v1beta3.Key.newBuilder(); - com.google.datastore.v1beta3.PartitionId.Builder partitionIdPb = + com.google.datastore.v1beta3.PartitionId.Builder partitionIdPb = com.google.datastore.v1beta3.PartitionId.newBuilder(); partitionIdPb.setProjectId(projectId); partitionIdPb.setNamespaceId(namespace); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java index fbe2887d9b35..b86c4ccb963e 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Blob.java @@ -33,7 +33,8 @@ * A Google Cloud Datastore Blob. * This class is immutable. * - * @see Google Cloud Datastore Entities, Properties, and Keys + * @see + * Google Cloud Datastore Entities, Properties, and Keys */ public final class Blob extends Serializable { @@ -106,7 +107,7 @@ public InputStream asInputStream() { * * @throws java.nio.ReadOnlyBufferException if the target is read-only * @throws java.nio.BufferOverflowException if the target's remaining() space is not large - * enough to hold the data. 
+ * enough to hold the data */ public void copyTo(ByteBuffer target) { byteString.copyTo(target); @@ -144,12 +145,12 @@ public static Blob copyFrom(InputStream input) throws IOException { } @Override - protected com.google.datastore.v1beta3.Value toPb() { + com.google.datastore.v1beta3.Value toPb() { return com.google.datastore.v1beta3.Value.newBuilder().setBlobValue(byteString).build(); } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return new Blob(com.google.datastore.v1beta3.Value.parseFrom(bytesPb).getBlobValue()); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java index df237e4d897c..c4d2b37672da 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Cursor.java @@ -17,19 +17,12 @@ package com.google.gcloud.datastore; import static com.google.common.base.Preconditions.checkNotNull; -import static java.nio.charset.StandardCharsets.UTF_8; import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; -import com.google.common.base.Preconditions; +import com.google.common.io.BaseEncoding; import com.google.protobuf.ByteString; import com.google.protobuf.InvalidProtocolBufferException; -import com.google.protobuf.TextFormat; -import com.google.protobuf.TextFormat.ParseException; - -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.net.URLEncoder; /** * A Google Cloud Datastore cursor. @@ -42,7 +35,6 @@ public final class Cursor extends Serializable REASON_TO_CODE; - private static final ImmutableMap HTTP_TO_CODE; - - private final Code code; - - /** - * An error code to represent the failure. - * - * @see Google Cloud - * Datastore error codes - */ - public enum Code { - - ABORTED(Reason.ABORTED), - DEADLINE_EXCEEDED(Reason.DEADLINE_EXCEEDED), - UNAVAILABLE(Reason.UNAVAILABLE), - FAILED_PRECONDITION(Reason.FAILED_PRECONDITION), - INVALID_ARGUMENT(Reason.INVALID_ARGUMENT), - PERMISSION_DENIED(Reason.PERMISSION_DENIED), - UNAUTHORIZED(false, "Unauthorized", 401), - INTERNAL(Reason.INTERNAL), - RESOURCE_EXHAUSTED(Reason.RESOURCE_EXHAUSTED), - UNKNOWN(false, "Unknown failure", -1); - - private final boolean retryable; - private final String description; - private final int httpStatus; - - Code(Reason reason) { - this(reason.retryable(), reason.description(), reason.httpStatus()); - } - - Code(boolean retryable, String description, int httpStatus) { - this.retryable = retryable; - this.description = description; - this.httpStatus = httpStatus; - } - - public String description() { - return description; - } - - public int httpStatus() { - return httpStatus; - } +/** + * Datastore service exception. + * + * @see Google Cloud + * Datastore error codes + */ +public class DatastoreException extends BaseServiceException { - /** - * Returns {@code true} if this exception is transient and the same request could be retried. - * For any retry it is highly recommended to apply an exponential backoff. 
- */ - public boolean retryable() { - return retryable; - } + // see https://cloud.google.com/datastore/docs/concepts/errors#Error_Codes" + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(10, "ABORTED"), new Error(4, "DEADLINE_EXCEEDED"), new Error(14, "UNAVAILABLE")); + private static final long serialVersionUID = 2663750991205874435L; - DatastoreException translate(DatastoreRpcException exception, String message) { - return new DatastoreException(this, message, exception); - } + public DatastoreException(int code, String message, String reason, Throwable cause) { + super(code, message, reason, true, cause); } - static { - ImmutableMap.Builder builder = ImmutableMap.builder(); - Map httpCodes = new HashMap<>(); - for (Code code : Code.values()) { - builder.put(code.name(), code); - httpCodes.put(code.httpStatus(), code); - } - REASON_TO_CODE = builder.build(); - HTTP_TO_CODE = ImmutableMap.copyOf(httpCodes); + public DatastoreException(int code, String message, String reason) { + super(code, message, reason, true); } - public DatastoreException(Code code, String message, Exception cause) { - super(MoreObjects.firstNonNull(message, code.description), cause); - this.code = code; + public DatastoreException(IOException exception) { + super(exception, true); } - public DatastoreException(Code code, String message) { - this(code, message, null); + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** - * Returns the code associated with this exception. - */ - public Code code() { - return code; - } - - static DatastoreException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof DatastoreRpcException) { - return translateAndThrow((DatastoreRpcException) ex.getCause()); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new DatastoreException(Code.UNKNOWN, ex.getMessage(), ex); - } - - /** - * Translate DatastoreException to DatastoreException based on their - * HTTP error codes. This method will always throw a new DatastoreException. + * Translate RetryHelperException to the DatastoreException that caused the error. This method + * will always throw an exception. * - * @throws DatastoreException every time + * @throws DatastoreException when {@code ex} was caused by a {@code DatastoreException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ - static DatastoreException translateAndThrow(DatastoreRpcException exception) { - String message = exception.getMessage(); - Code code = REASON_TO_CODE.get(exception.reason()); - if (code == null) { - code = MoreObjects.firstNonNull(HTTP_TO_CODE.get(exception.httpStatus()), Code.UNKNOWN); - } - throw code.translate(exception, message); + static DatastoreException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new DatastoreException(UNKNOWN_CODE, ex.getMessage(), null); } /** - * Throw a DatastoreException with {@code FAILED_PRECONDITION} code and the {@code message} - * in a nested exception. + * Throw a DatastoreException with {@code FAILED_PRECONDITION} reason and the {@code message} in a + * nested exception. * * @throws DatastoreException every time */ static DatastoreException throwInvalidRequest(String massage, Object... 
params) { - throw new DatastoreException(Code.FAILED_PRECONDITION, String.format(massage, params)); + throw new DatastoreException(UNKNOWN_CODE, String.format(massage, params), + "FAILED_PRECONDITION"); } static DatastoreException propagateUserException(Exception ex) { - throw new DatastoreException(Code.UNKNOWN, ex.getMessage(), ex); + throw new DatastoreException(BaseServiceException.UNKNOWN_CODE, ex.getMessage(), null, ex); } } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreFactory.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreFactory.java index a64fab3715f1..b1f5a026a3e5 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreFactory.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreFactory.java @@ -17,27 +17,10 @@ package com.google.gcloud.datastore; +import com.google.gcloud.ServiceFactory; + /** - * A base class for Datastore factories. + * An interface for Datastore factories. */ -public abstract class DatastoreFactory { - - private static final DatastoreFactory INSTANCE = new DatastoreFactory() { - @Override - public Datastore get(DatastoreOptions options) { - return new DatastoreImpl(options); - } - }; - - /** - * Returns the default factory instance. - */ - public static DatastoreFactory instance() { - return INSTANCE; - } - - /** - * Returns a {@code Datastore} service for the given options. - */ - public abstract Datastore get(DatastoreOptions options); +public interface DatastoreFactory extends ServiceFactory { } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java index 59369419febf..df4e22149fd5 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreImpl.java @@ -22,17 +22,14 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; import com.google.gcloud.RetryHelper; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.RetryParams; import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; import com.google.protobuf.ByteString; -import java.util.Arrays; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; @@ -42,39 +39,14 @@ import java.util.Set; import java.util.concurrent.Callable; - -final class DatastoreImpl extends BaseService - implements Datastore { - - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = - new Interceptor() { - - private static final long serialVersionUID = 6911242958397733203L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof DatastoreRpcException) { - boolean retryable = ((DatastoreRpcException) exception).retryable(); - return retryable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - private static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class, DatastoreRpcException.class) - .interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); +final class DatastoreImpl extends BaseService implements Datastore { private final DatastoreRpc datastoreRpc; private final RetryParams retryParams; DatastoreImpl(DatastoreOptions options) { super(options); - this.datastoreRpc = options.datastoreRpc(); + this.datastoreRpc = options.rpc(); retryParams = MoreObjects.firstNonNull(options.retryParams(), RetryParams.noRetries()); } @@ -108,7 +80,7 @@ com.google.datastore.v1beta3.RunQueryResponse runQuery( return RetryHelper.runWithRetries( new Callable() { @Override public com.google.datastore.v1beta3.RunQueryResponse call() - throws DatastoreRpcException { + throws DatastoreException { return datastoreRpc.runQuery(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -127,7 +99,7 @@ public List allocateId(IncompleteKey... keys) { if (keys.length == 0) { return Collections.emptyList(); } - com.google.datastore.v1beta3.AllocateIdsRequest.Builder requestPb = + com.google.datastore.v1beta3.AllocateIdsRequest.Builder requestPb = com.google.datastore.v1beta3.AllocateIdsRequest.newBuilder(); for (IncompleteKey key : keys) { requestPb.addKeys(trimNameOrId(key).toPb()); @@ -146,7 +118,7 @@ com.google.datastore.v1beta3.AllocateIdsResponse allocateIds( return RetryHelper.runWithRetries( new Callable() { @Override public com.google.datastore.v1beta3.AllocateIdsResponse call() - throws DatastoreRpcException { + throws DatastoreException { return datastoreRpc.allocateIds(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -183,7 +155,7 @@ public List add(FullEntity... entities) { if (completeEntity != null) { if (completeEntities.put(completeEntity.key(), completeEntity) != null) { throw DatastoreException.throwInvalidRequest( - "Duplicate entity with the key %s", entity.key()); + "Duplicate entity with the key %s", entity.key()); } } else { Preconditions.checkArgument(entity.hasKey(), "entity %s is missing a key", entity); @@ -227,7 +199,7 @@ Iterator get(com.google.datastore.v1beta3.ReadOptions readOptionsPb, fin if (keys.length == 0) { return Collections.emptyIterator(); } - com.google.datastore.v1beta3.LookupRequest.Builder requestPb = + com.google.datastore.v1beta3.LookupRequest.Builder requestPb = com.google.datastore.v1beta3.LookupRequest.newBuilder(); if (readOptionsPb != null) { requestPb.setReadOptions(readOptionsPb); @@ -276,7 +248,7 @@ com.google.datastore.v1beta3.LookupResponse lookup( return RetryHelper.runWithRetries( new Callable() { @Override public com.google.datastore.v1beta3.LookupResponse call() - throws DatastoreRpcException { + throws DatastoreException { return datastoreRpc.lookup(requestPb); } }, retryParams, EXCEPTION_HANDLER); @@ -289,7 +261,7 @@ com.google.datastore.v1beta3.LookupResponse lookup( @Override public final void update(Entity... entities) { if (entities.length > 0) { - List mutationsPb = + List mutationsPb = new ArrayList<>(); Map dedupEntities = new LinkedHashMap<>(); for (Entity entity : entities) { @@ -307,7 +279,7 @@ public final void update(Entity... entities) { @Override public final void put(Entity... 
entities) { if (entities.length > 0) { - List mutationsPb = + List mutationsPb = new ArrayList<>(); Map dedupEntities = new LinkedHashMap<>(); for (Entity entity : entities) { @@ -341,7 +313,7 @@ public KeyFactory newKeyFactory() { private com.google.datastore.v1beta3.CommitResponse commitMutation( List mutationsPb) { - com.google.datastore.v1beta3.CommitRequest.Builder requestPb = + com.google.datastore.v1beta3.CommitRequest.Builder requestPb = com.google.datastore.v1beta3.CommitRequest.newBuilder(); requestPb.setMode(com.google.datastore.v1beta3.CommitRequest.Mode.NON_TRANSACTIONAL); requestPb.addAllMutations(mutationsPb); @@ -353,11 +325,13 @@ com.google.datastore.v1beta3.CommitResponse commit( try { return RetryHelper.runWithRetries( new Callable() { - @Override public com.google.datastore.v1beta3.CommitResponse call() - throws DatastoreRpcException { - return datastoreRpc.commit(requestPb); - } - }, retryParams, EXCEPTION_HANDLER); + @Override + public com.google.datastore.v1beta3.CommitResponse call() throws DatastoreException { + return datastoreRpc.commit(requestPb); + } + }, + retryParams, + EXCEPTION_HANDLER); } catch (RetryHelperException e) { throw DatastoreException.translateAndThrow(e); } @@ -373,19 +347,21 @@ com.google.datastore.v1beta3.BeginTransactionResponse beginTransaction( try { return RetryHelper.runWithRetries( new Callable() { - @Override - public com.google.datastore.v1beta3.BeginTransactionResponse call() - throws DatastoreRpcException { - return datastoreRpc.beginTransaction(requestPb); - } - }, retryParams, EXCEPTION_HANDLER); + @Override + public com.google.datastore.v1beta3.BeginTransactionResponse call() + throws DatastoreException { + return datastoreRpc.beginTransaction(requestPb); + } + }, + retryParams, + EXCEPTION_HANDLER); } catch (RetryHelperException e) { throw DatastoreException.translateAndThrow(e); } } void rollbackTransaction(ByteString transaction) { - com.google.datastore.v1beta3.RollbackRequest.Builder requestPb = + com.google.datastore.v1beta3.RollbackRequest.Builder requestPb = com.google.datastore.v1beta3.RollbackRequest.newBuilder(); requestPb.setTransaction(transaction); rollback(requestPb.build()); @@ -394,7 +370,7 @@ void rollbackTransaction(ByteString transaction) { void rollback(final com.google.datastore.v1beta3.RollbackRequest requestPb) { try { RetryHelper.runWithRetries(new Callable() { - @Override public Void call() throws DatastoreRpcException { + @Override public Void call() throws DatastoreException { datastoreRpc.rollback(requestPb); return null; } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java index fe318ef8720f..112d0e8d2602 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreOptions.java @@ -23,7 +23,6 @@ import com.google.common.collect.Iterables; import com.google.gcloud.ServiceOptions; import com.google.gcloud.spi.DatastoreRpc; -import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException; import com.google.gcloud.spi.DatastoreRpcFactory; import com.google.gcloud.spi.DefaultDatastoreRpc; @@ -32,19 +31,38 @@ import java.util.Objects; import java.util.Set; -public class DatastoreOptions extends ServiceOptions { +public class DatastoreOptions extends ServiceOptions { - private static final long serialVersionUID = -8636602944160689193L; + private static final 
long serialVersionUID = 5056049000758143852L; private static final String DATASTORE_SCOPE = "https://www.googleapis.com/auth/datastore"; private static final String USERINFO_SCOPE = "https://www.googleapis.com/auth/userinfo.email"; private static final Set SCOPES = ImmutableSet.of(DATASTORE_SCOPE, USERINFO_SCOPE); private final String namespace; private final boolean normalizeDataset; - private transient DatastoreRpc datastoreRpc; + + public static class DefaultDatastoreFactory implements DatastoreFactory { + + private static final DatastoreFactory INSTANCE = new DefaultDatastoreFactory(); + + @Override + public Datastore create(DatastoreOptions options) { + return new DatastoreImpl(options); + } + } + + public static class DefaultDatastoreRpcFactory implements DatastoreRpcFactory { + + private static final DatastoreRpcFactory INSTANCE = new DefaultDatastoreRpcFactory(); + + @Override + public DatastoreRpc create(DatastoreOptions options) { + return new DefaultDatastoreRpc(options); + } + } public static class Builder extends - ServiceOptions.Builder { + ServiceOptions.Builder { private String namespace; private boolean normalizeDataset = true; @@ -76,7 +94,7 @@ Builder normalizeDataset(boolean normalizeDataset) { } private DatastoreOptions(Builder builder) { - super(builder); + super(DatastoreFactory.class, DatastoreRpcFactory.class, builder); normalizeDataset = builder.normalizeDataset; namespace = builder.namespace != null ? builder.namespace : defaultNamespace(); } @@ -89,34 +107,29 @@ private DatastoreOptions normalize() { Builder builder = toBuilder(); builder.normalizeDataset(false); // Replace provided project-id with full project-id (s~xxx, e~xxx,...) - com.google.datastore.v1beta3.LookupRequest.Builder requestPb = + com.google.datastore.v1beta3.LookupRequest.Builder requestPb = com.google.datastore.v1beta3.LookupRequest.newBuilder(); com.google.datastore.v1beta3.Key key = com.google.datastore.v1beta3.Key.newBuilder() .addPath(com.google.datastore.v1beta3.Key.PathElement.newBuilder() - .setKind("__foo__").setName("bar")) + .setKind("__foo__").setName("bar")) .build(); requestPb.addKeys(key); - try { - com.google.datastore.v1beta3.LookupResponse responsePb = - datastoreRpc().lookup(requestPb.build()); - if (responsePb.getDeferredCount() > 0) { - key = responsePb.getDeferred(0); - } else { - Iterator combinedIter = - Iterables.concat(responsePb.getMissingList(), responsePb.getFoundList()).iterator(); - key = combinedIter.next().getEntity().getKey(); - } - builder.projectId(key.getPartitionId().getProjectId()); - return new DatastoreOptions(builder); - } catch (DatastoreRpcException e) { - throw DatastoreException.translateAndThrow(e); + com.google.datastore.v1beta3.LookupResponse responsePb = rpc().lookup(requestPb.build()); + if (responsePb.getDeferredCount() > 0) { + key = responsePb.getDeferred(0); + } else { + Iterator combinedIter = + Iterables.concat(responsePb.getMissingList(), responsePb.getFoundList()).iterator(); + key = combinedIter.next().getEntity().getKey(); } + builder.projectId(key.getPartitionId().getProjectId()); + return new DatastoreOptions(builder); } @Override protected String defaultHost() { String host = System.getProperty( - com.google.datastore.v1beta3.client.DatastoreHelper.LOCAL_HOST_ENV_VAR, + com.google.datastore.v1beta3.client.DatastoreHelper.LOCAL_HOST_ENV_VAR, System.getenv(com.google.datastore.v1beta3.client.DatastoreHelper.LOCAL_HOST_ENV_VAR)); return host != null ? 
host : super.defaultHost(); } @@ -132,13 +145,30 @@ protected String defaultProject() { return projectId != null ? projectId : super.defaultProject(); } + @SuppressWarnings("unchecked") + @Override + protected DatastoreFactory defaultServiceFactory() { + return DefaultDatastoreFactory.INSTANCE; + } + + @SuppressWarnings("unchecked") + @Override + protected DatastoreRpcFactory defaultRpcFactory() { + return DefaultDatastoreRpcFactory.INSTANCE; + } + public String namespace() { return namespace; } + /** + * Returns a default {@code DatastoreOptions} instance. + */ + public static DatastoreOptions defaultInstance() { + return builder().build(); + } + private static String defaultNamespace() { - // TODO(ozarov): An alternative to reflection would be to depend on AE api jar: - // http://mvnrepository.com/artifact/com.google.appengine/appengine-api-1.0-sdk/1.2.0 try { Class clazz = Class.forName("com.google.appengine.api.NamespaceManager"); Method method = clazz.getMethod("get"); @@ -155,6 +185,7 @@ protected Set scopes() { return SCOPES; } + @SuppressWarnings("unchecked") @Override public Builder toBuilder() { return new Builder(this); @@ -175,25 +206,6 @@ public boolean equals(Object obj) { && Objects.equals(normalizeDataset, other.normalizeDataset); } - DatastoreRpc datastoreRpc() { - if (datastoreRpc != null) { - return datastoreRpc; - } - if (serviceRpcFactory() != null) { - datastoreRpc = serviceRpcFactory().create(this); - } else { - datastoreRpc = createRpc(this, DatastoreRpcFactory.class); - if (datastoreRpc == null) { - datastoreRpc = new DefaultDatastoreRpc(this); - } - } - return datastoreRpc; - } - - public static DatastoreOptions defaultInstance() { - return builder().build(); - } - public static Builder builder() { return new Builder(); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java index 056895f850e3..4852dd53e16c 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DatastoreReader.java @@ -27,7 +27,7 @@ public interface DatastoreReader { /** * Returns an {@link Entity} for the given {@link Key} or {@code null} if does not exists. * - * @throws DatastoreException upon failure. + * @throws DatastoreException upon failure */ Entity get(Key key); @@ -38,7 +38,7 @@ public interface DatastoreReader { * from the returned {@code Iterator}'s {@link Iterator#hasNext hasNext} or * {@link Iterator#next next} methods. * - * @throws DatastoreException upon failure. + * @throws DatastoreException upon failure * @see #get(Key) */ Iterator get(Key... key); @@ -53,7 +53,7 @@ public interface DatastoreReader { /** * Submit a {@link Query} and returns its result. * - * @throws DatastoreException upon failure. 
+ * @throws DatastoreException upon failure */ QueryResults run(Query query); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java index eca352f5ba0c..5e8664395802 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/DateTime.java @@ -96,21 +96,21 @@ public static DateTime copyFrom(Calendar calendar) { } @Override - protected com.google.protobuf.Timestamp toPb() { + com.google.protobuf.Timestamp toPb() { return microsecondsToTimestampPb(timestampMicroseconds); } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return new DateTime(timestampPbToMicroseconds( com.google.protobuf.Timestamp.parseFrom(bytesPb))); } - - protected static long timestampPbToMicroseconds(com.google.protobuf.Timestamp timestampPb) { + + static long timestampPbToMicroseconds(com.google.protobuf.Timestamp timestampPb) { return timestampPb.getSeconds() * 1000000 + timestampPb.getNanos() / 1000; } - - protected static com.google.protobuf.Timestamp microsecondsToTimestampPb(long microseconds) { + + static com.google.protobuf.Timestamp microsecondsToTimestampPb(long microseconds) { long seconds = microseconds / 1000000; int nanos = (int) (microseconds % 1000000) * 1000; return com.google.protobuf.Timestamp.newBuilder().setSeconds(seconds).setNanos(nanos).build(); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java new file mode 100644 index 000000000000..3eda20eed3dc --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/EntityQuery.java @@ -0,0 +1,65 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +/** + * An implementation of a Google Cloud Datastore entity query that can be constructed by providing + * all the specific query elements. + * + * @see Datastore + * queries + */ +public final class EntityQuery extends StructuredQuery { + + private static final long serialVersionUID = 2990565454831019471L; + + /** + * A {@code EntityQuery} builder for queries that return {@link Entity} results. 
+ */ + public static final class Builder extends StructuredQuery.BuilderImpl { + + Builder(EntityQuery query) { + super(query); + } + + Builder() { + super(ResultType.ENTITY); + } + + @Override + Builder mergeFrom(com.google.datastore.v1beta3.Query queryPb) { + super.mergeFrom(queryPb); + clearProjection(); + clearDistinctOn(); + return this; + } + + @Override + public EntityQuery build() { + return new EntityQuery(this); + } + } + + EntityQuery(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } +} diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java index b1534984aeb0..25225b853556 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/FullEntity.java @@ -17,7 +17,8 @@ package com.google.gcloud.datastore; /** - * A full entity is a {@link BaseEntity} that with a complete set of properties. + * A full entity is a {@link BaseEntity} that holds all the properties associated with a + * Datastore entity (as opposed to {@link ProjectionEntity}). */ public class FullEntity extends BaseEntity { diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java index 6c746c7924d7..ec2ed3333947 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/GqlQuery.java @@ -116,8 +116,8 @@ public boolean equals(Object obj) { } @Override - protected com.google.datastore.v1beta3.GqlQueryParameter toPb() { - com.google.datastore.v1beta3.GqlQueryParameter.Builder argPb = + com.google.datastore.v1beta3.GqlQueryParameter toPb() { + com.google.datastore.v1beta3.GqlQueryParameter.Builder argPb = com.google.datastore.v1beta3.GqlQueryParameter.newBuilder(); if (cursor != null) { argPb.setCursor(cursor.byteString()); @@ -129,7 +129,7 @@ protected com.google.datastore.v1beta3.GqlQueryParameter toPb() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(com.google.datastore.v1beta3.GqlQueryParameter.parseFrom(bytesPb)); } @@ -357,12 +357,12 @@ public boolean equals(Object obj) { } @Override - protected com.google.datastore.v1beta3.GqlQuery toPb() { - com.google.datastore.v1beta3.GqlQuery.Builder queryPb = + com.google.datastore.v1beta3.GqlQuery toPb() { + com.google.datastore.v1beta3.GqlQuery.Builder queryPb = com.google.datastore.v1beta3.GqlQuery.newBuilder(); queryPb.setQueryString(queryString); queryPb.setAllowLiterals(allowLiteral); - Map namedBindingsPb = + Map namedBindingsPb = queryPb.getMutableNamedBindings(); for (Map.Entry entry : namedBindings.entrySet()) { namedBindingsPb.put(entry.getKey(), entry.getValue().toPb()); @@ -374,18 +374,18 @@ protected com.google.datastore.v1beta3.GqlQuery toPb() { } @Override - protected void populatePb(com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb) { + void populatePb(com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb) { requestPb.setGqlQuery(toPb()); } @Override - protected Query nextQuery(com.google.datastore.v1beta3.RunQueryResponse responsePb) { + Query nextQuery(com.google.datastore.v1beta3.RunQueryResponse responsePb) 
{ return StructuredQuery.fromPb(type(), namespace(), responsePb.getQuery()) .nextQuery(responsePb); } @Override - protected Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) + Object fromPb(ResultType resultType, String namespace, byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(resultType, namespace, com.google.datastore.v1beta3.GqlQuery.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java index 367bdbafed71..41a109bec496 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/IncompleteKey.java @@ -53,7 +53,7 @@ public IncompleteKey build() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(com.google.datastore.v1beta3.Key.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java index 04dbdddf8e23..ccc36ef1f533 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Key.java @@ -164,7 +164,7 @@ public static Key fromUrlSafe(String urlSafe) { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(com.google.datastore.v1beta3.Key.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java index 7c4efb91762d..947880111ea4 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyFactory.java @@ -58,7 +58,7 @@ public Key newKey(long id) { /** * Resets the KeyFactory to its initial state. - * @return {@code this} for chaining. + * @return {@code this} for chaining */ public KeyFactory reset() { projectId(pi); diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java new file mode 100644 index 000000000000..4224f2d07ce5 --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/KeyQuery.java @@ -0,0 +1,66 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +/** + * An implementation of a Google Cloud Datastore key-only query that can be constructed by providing + * all the specific query elements. 
+ * + * @see Datastore + * queries + */ +public final class KeyQuery extends StructuredQuery { + + private static final long serialVersionUID = -746768461459070045L; + + /** + * A {@code KeyQuery} builder for queries that return {@link Key} results. + */ + public static final class Builder extends StructuredQuery.BuilderImpl { + + Builder(KeyQuery query) { + super(query); + } + + Builder() { + super(ResultType.KEY); + projection(KEY_PROPERTY_NAME); + } + + @Override + Builder mergeFrom(com.google.datastore.v1beta3.Query queryPb) { + super.mergeFrom(queryPb); + projection(KEY_PROPERTY_NAME); + clearDistinctOn(); + return this; + } + + @Override + public KeyQuery build() { + return new KeyQuery(this); + } + } + + KeyQuery(Builder builder) { + super(builder); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } +} diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java index b0eb9fd83855..1759cc82bcea 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java @@ -85,8 +85,8 @@ public boolean equals(Object obj) { } @Override - protected com.google.datastore.v1beta3.Key.PathElement toPb() { - com.google.datastore.v1beta3.Key.PathElement.Builder pathElementPb = + com.google.datastore.v1beta3.Key.PathElement toPb() { + com.google.datastore.v1beta3.Key.PathElement.Builder pathElementPb = com.google.datastore.v1beta3.Key.PathElement.newBuilder(); pathElementPb.setKind(kind); if (id != null) { @@ -98,7 +98,7 @@ protected com.google.datastore.v1beta3.Key.PathElement toPb() { } @Override - protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { + Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException { return fromPb(com.google.datastore.v1beta3.Key.PathElement.parseFrom(bytesPb)); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java new file mode 100644 index 000000000000..d0e9920f3a61 --- /dev/null +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java @@ -0,0 +1,112 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.datastore; + +/** + * An implementation of a Google Cloud Datastore projection entity query that can be constructed by + * providing all the specific query elements. + * + * @see Datastore + * queries + */ +public final class ProjectionEntityQuery extends StructuredQuery { + + private static final long serialVersionUID = 5488451194542425391L; + + /** + * A {@code ProjectionEntityQuery} builder for queries that return {@link ProjectionEntity} + * results. 
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java
index b0eb9fd83855..1759cc82bcea 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/PathElement.java
@@ -85,8 +85,8 @@ public boolean equals(Object obj) {
   }
 
   @Override
-  protected com.google.datastore.v1beta3.Key.PathElement toPb() {
-    com.google.datastore.v1beta3.Key.PathElement.Builder pathElementPb = 
+  com.google.datastore.v1beta3.Key.PathElement toPb() {
+    com.google.datastore.v1beta3.Key.PathElement.Builder pathElementPb =
         com.google.datastore.v1beta3.Key.PathElement.newBuilder();
     pathElementPb.setKind(kind);
     if (id != null) {
@@ -98,7 +98,7 @@ protected com.google.datastore.v1beta3.Key.PathElement toPb() {
   }
 
   @Override
-  protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException {
+  Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException {
     return fromPb(com.google.datastore.v1beta3.Key.PathElement.parseFrom(bytesPb));
   }
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java
new file mode 100644
index 000000000000..d0e9920f3a61
--- /dev/null
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ProjectionEntityQuery.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.datastore;
+
+/**
+ * An implementation of a Google Cloud Datastore projection entity query that can be constructed
+ * by providing all the specific query elements.
+ *
+ * @see <a href="https://cloud.google.com/datastore/docs/concepts/queries">Datastore
+ *     queries</a>
+ */
+public final class ProjectionEntityQuery extends StructuredQuery<ProjectionEntity> {
+
+  private static final long serialVersionUID = 5488451194542425391L;
+
+  /**
+   * A {@code ProjectionEntityQuery} builder for queries that return {@link ProjectionEntity}
+   * results.
+   */
+  public static final class Builder extends StructuredQuery.BuilderImpl<ProjectionEntity, Builder> {
+
+    Builder(ProjectionEntityQuery query) {
+      super(query);
+    }
+
+    Builder() {
+      super(ResultType.PROJECTION_ENTITY);
+    }
+
+    /**
+     * Clears the projection clause.
+     */
+    @Override
+    public Builder clearProjection() {
+      super.clearProjection();
+      return this;
+    }
+
+    /**
+     * Sets the query's projection clause (clearing any previously specified Projection settings).
+     */
+    @Override
+    public Builder projection(String projection, String... others) {
+      super.projection(projection, others);
+      return this;
+    }
+
+    /**
+     * Adds one or more projections to the existing projection clause.
+     */
+    @Override
+    public Builder addProjection(String projection, String... others) {
+      super.addProjection(projection, others);
+      return this;
+    }
+
+    /**
+     * Clears the group by clause.
+     */
+    @Override
+    public Builder clearDistinctOn() {
+      super.clearDistinctOn();
+      return this;
+    }
+
+    /**
+     * Sets the query's group by clause (clearing any previously specified GroupBy settings).
+     */
+    @Override
+    public Builder distinctOn(String property, String... others) {
+      super.distinctOn(property, others);
+      return this;
+    }
+
+    /**
+     * Adds one or more properties to the existing group by clause.
+     */
+    @Override
+    public Builder addDistinctOn(String property, String... others) {
+      super.addDistinctOn(property, others);
+      return this;
+    }
+
+    @Override
+    public ProjectionEntityQuery build() {
+      return new ProjectionEntityQuery(this);
+    }
+  }
+
+  ProjectionEntityQuery(Builder builder) {
+    super(builder);
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+}
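`ProjectionEntityQuery` is the analogous standalone type for projection queries. A hedged usage sketch, with placeholder kind and property names and an assumed `datastore` instance:

```java
ProjectionEntityQuery query = Query.projectionEntityQueryBuilder()
    .kind("Task")
    .projection("priority", "done")  // only these properties are materialized
    .build();
QueryResults<ProjectionEntity> results = datastore.run(query);
```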
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java
index cd7d1015deda..dd0ea9f1b798 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Query.java
@@ -21,9 +21,6 @@
 import com.google.common.base.MoreObjects;
 import com.google.common.base.MoreObjects.ToStringHelper;
 import com.google.common.collect.Maps;
-import com.google.gcloud.datastore.StructuredQuery.EntityQueryBuilder;
-import com.google.gcloud.datastore.StructuredQuery.KeyQueryBuilder;
-import com.google.gcloud.datastore.StructuredQuery.ProjectionEntityQueryBuilder;
 import com.google.protobuf.GeneratedMessage;
 import com.google.protobuf.InvalidProtocolBufferException;
 
@@ -34,6 +31,10 @@
  * A Google Cloud Datastore query.
  * For usage examples see {@link GqlQuery} and {@link StructuredQuery}.
  *
+ * Note that queries require proper indexing. See
+ * <a href="https://cloud.google.com/datastore/docs/tools/indexconfig">
+ * Cloud Datastore Index Configuration</a> for help configuring indexes.
+ *
  * @param <V> the type of the values returned by this query.
  * @see <a href="https://cloud.google.com/datastore/docs/concepts/queries">Datastore Queries</a>
  */
@@ -57,42 +58,46 @@ public abstract static class ResultType<V> implements java.io.Serializable {
     PB_TO_INSTANCE = Maps.newEnumMap(
         com.google.datastore.v1beta3.EntityResult.ResultType.class);
 
-    static final ResultType<?> UNKNOWN = new ResultType<Object>(null, Object.class) {
+    static final ResultType<?> UNKNOWN =
+        new ResultType<Object>(null, Object.class) {
 
-      private static final long serialVersionUID = 1602329532153860907L;
+          private static final long serialVersionUID = 1602329532153860907L;
 
-      @Override protected Object convert(com.google.datastore.v1beta3.Entity entityPb) {
-        if (entityPb.getProperties().isEmpty()) {
-          if (!entityPb.hasKey()) {
-            return null;
+          @Override
+          Object convert(com.google.datastore.v1beta3.Entity entityPb) {
+            if (entityPb.getProperties().isEmpty()) {
+              if (!entityPb.hasKey()) {
+                return null;
+              }
+              return Key.fromPb(entityPb.getKey());
+            }
+            return ProjectionEntity.fromPb(entityPb);
           }
-          return Key.fromPb(entityPb.getKey());
-        }
-        return ProjectionEntity.fromPb(entityPb);
-      }
-    };
+        };
 
     public static final ResultType<Entity> ENTITY = new ResultType<Entity>(
         com.google.datastore.v1beta3.EntityResult.ResultType.FULL, Entity.class) {
 
-      private static final long serialVersionUID = 7712959777507168274L;
+          private static final long serialVersionUID = 7712959777507168274L;
 
-      @Override protected Entity convert(com.google.datastore.v1beta3.Entity entityPb) {
-        return Entity.fromPb(entityPb);
-      }
-    };
+          @Override
+          Entity convert(com.google.datastore.v1beta3.Entity entityPb) {
+            return Entity.fromPb(entityPb);
+          }
+        };
 
     public static final ResultType<Key> KEY = new ResultType<Key>(
         com.google.datastore.v1beta3.EntityResult.ResultType.KEY_ONLY, Key.class) {
 
-      private static final long serialVersionUID = -8514289244104446252L;
+          private static final long serialVersionUID = -8514289244104446252L;
 
-      @Override protected Key convert(com.google.datastore.v1beta3.Entity entityPb) {
-        return Key.fromPb(entityPb.getKey());
-      }
-    };
+          @Override
+          Key convert(com.google.datastore.v1beta3.Entity entityPb) {
+            return Key.fromPb(entityPb.getKey());
+          }
+        };
 
     public static final ResultType<ProjectionEntity> PROJECTION_ENTITY = new ResultType<ProjectionEntity>(
@@ -101,11 +106,11 @@ public abstract static class ResultType<V> implements java.io.Serializable {
 
       private static final long serialVersionUID = -7591409419690650246L;
 
-      @Override protected ProjectionEntity convert(
-          com.google.datastore.v1beta3.Entity entityPb) {
+      @Override
+      ProjectionEntity convert(com.google.datastore.v1beta3.Entity entityPb) {
         return ProjectionEntity.fromPb(entityPb);
       }
-    };
+      };
 
     private final Class<V> resultClass;
     private final com.google.datastore.v1beta3.EntityResult.ResultType queryType;
@@ -152,7 +157,7 @@ boolean isAssignableFrom(ResultType<?> otherResultType) {
     return resultClass.isAssignableFrom(otherResultType.resultClass);
   }
 
-  protected abstract V convert(com.google.datastore.v1beta3.Entity entityPb);
+  abstract V convert(com.google.datastore.v1beta3.Entity entityPb);
 
   static ResultType<?> fromPb(com.google.datastore.v1beta3.EntityResult.ResultType typePb) {
     return MoreObjects.firstNonNull(PB_TO_INSTANCE.get(typePb), UNKNOWN);
@@ -182,17 +187,16 @@ public String toString() {
   }
 
   @Override
-  protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException {
+  Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException {
     return fromPb(resultType, namespace, bytesPb);
   }
 
-  protected abstract Object fromPb(ResultType<V> resultType, String namespace, byte[] bytesPb)
+  abstract Object fromPb(ResultType<V> resultType, String namespace, byte[] bytesPb)
       throws InvalidProtocolBufferException;
 
-  protected abstract void populatePb(
-      com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb);
+  abstract void populatePb(com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb);
 
-  protected abstract Query<V> nextQuery(com.google.datastore.v1beta3.RunQueryResponse responsePb);
+  abstract Query<V> nextQuery(com.google.datastore.v1beta3.RunQueryResponse responsePb);
 
   /**
    * Returns a new {@link GqlQuery} builder.
@@ -215,21 +219,21 @@ public static <V> GqlQuery.Builder<V> gqlQueryBuilder(ResultType<V> resultType,
   /**
    * Returns a new {@link StructuredQuery} builder for full (complete entities) queries.
    */
-  public static EntityQueryBuilder entityQueryBuilder() {
-    return new EntityQueryBuilder();
+  public static EntityQuery.Builder entityQueryBuilder() {
+    return new EntityQuery.Builder();
   }
 
   /**
    * Returns a new {@link StructuredQuery} builder for key only queries.
    */
-  public static KeyQueryBuilder keyQueryBuilder() {
-    return new KeyQueryBuilder();
+  public static KeyQuery.Builder keyQueryBuilder() {
+    return new KeyQuery.Builder();
   }
 
   /**
    * Returns a new {@link StructuredQuery} builder for projection queries.
    */
-  public static ProjectionEntityQueryBuilder projectionEntityQueryBuilder() {
-    return new ProjectionEntityQueryBuilder();
+  public static ProjectionEntityQuery.Builder projectionEntityQueryBuilder() {
+    return new ProjectionEntityQuery.Builder();
   }
 }
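With the factories now returning the dedicated builder types, typed queries read as below. A minimal sketch (the GQL string and kind are placeholders, `datastore` is assumed initialized):

```java
// Structured form.
Query<Entity> structured = Query.entityQueryBuilder().kind("Task").build();
// GQL form, typed by the expected result.
Query<Entity> gql =
    Query.gqlQueryBuilder(Query.ResultType.ENTITY, "select * from Task").build();
QueryResults<Entity> results = datastore.run(structured);
```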
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java
index 110add0bbbe4..a6e5971936dd 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResults.java
@@ -22,8 +22,9 @@
  * The result of a Google Cloud Datastore query submission.
  * When the result is not typed it is possible to cast it to its appropriate type according to
  * the {@link #resultClass} value.
- * Results are loaded lazily; therefore it is possible to get a {@code DatastoreException}
- * upon {@link Iterator#hasNext hasNext} or {@link Iterator#next next} calls.
+ * Results are loaded lazily in batches, where batch size is set by Cloud Datastore. As a result, it
+ * is possible to get a {@code DatastoreException} upon {@link Iterator#hasNext hasNext} or
+ * {@link Iterator#next next} calls.
  *
  * @param <V> the type of the results value.
  */
@@ -49,7 +50,7 @@ public interface QueryResults<V> extends Iterator<V> {
    * // Consume some results (using results.next()) and do any other actions as necessary.
    * query = query.toBuilder().startCursor(results.cursorAfter()).build();
    * results = datastore.run(query); // now we will iterate over all entities not yet consumed
-   *
+   * }
    */
   Cursor cursorAfter();
 }
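Spelled out, the `cursorAfter()` pagination pattern from the javadoc above (and exercised by the new `testQueryPaginationWithLimit` test later in this patch) looks roughly like this; the kind and page size are placeholders:

```java
StructuredQuery<Entity> query =
    Query.entityQueryBuilder().kind("Task").limit(100).build();
while (true) {
  QueryResults<Entity> results = datastore.run(query);
  int count = 0;
  while (results.hasNext()) {
    Entity entity = results.next();  // lazy batch loading may throw DatastoreException here
    count++;
  }
  if (count < 100) {
    break;  // fewer results than the limit means we reached the end
  }
  query = query.toBuilder().startCursor(results.cursorAfter()).build();
}
```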
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java
index 10f3eeda58ac..ec3a652c6131 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/QueryResultsImpl.java
@@ -39,7 +39,6 @@ class QueryResultsImpl<T> extends AbstractIterator<T> implements QueryResults<T>
   private Iterator<com.google.datastore.v1beta3.EntityResult> entityResultPbIter;
   private ByteString cursor;
 
-
   QueryResultsImpl(DatastoreImpl datastore, com.google.datastore.v1beta3.ReadOptions readOptionsPb,
       Query<T> query) {
     this.datastore = datastore;
@@ -94,6 +93,7 @@ protected T computeNext() {
       sendRequest();
     }
     if (!entityResultPbIter.hasNext()) {
+      cursor = runQueryResponsePb.getBatch().getEndCursor();
       return endOfData();
     }
     com.google.datastore.v1beta3.EntityResult entityResultPb = entityResultPbIter.next();
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java
index ff62fe89195f..89d19bcfd892 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Serializable.java
@@ -46,7 +46,7 @@ private void readObject(ObjectInputStream input) throws IOException, ClassNotFou
     bytesPb = (byte[]) input.readObject();
   }
 
-  protected Object readResolve() throws ObjectStreamException {
+  Object readResolve() throws ObjectStreamException {
     try {
       return fromPb(bytesPb);
     } catch (InvalidProtocolBufferException ex) {
@@ -58,7 +58,7 @@ protected Object readResolve() throws ObjectStreamException {
     }
   }
 
-  protected abstract M toPb();
+  abstract M toPb();
 
-  protected abstract Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException;
+  abstract Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException;
 }
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java
index 7a05c23437ce..149b0030cf48 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/StructuredQuery.java
@@ -80,10 +80,10 @@
  * @see <a href="https://cloud.google.com/datastore/docs/concepts/queries">Datastore
  *     queries</a>
  */
-public class StructuredQuery<V> extends Query<V> {
+public abstract class StructuredQuery<V> extends Query<V> {
 
   private static final long serialVersionUID = 546838955624019594L;
-  private static final String KEY_PROPERTY_NAME = "__key__";
+  static final String KEY_PROPERTY_NAME = "__key__";
 
   private final transient String kind;
   private final ImmutableList<String> projection;
@@ -102,7 +102,7 @@ public abstract static class Filter implements Serializable {
     Filter() {
     }
 
-    protected abstract com.google.datastore.v1beta3.Filter toPb();
+    abstract com.google.datastore.v1beta3.Filter toPb();
 
     static Filter fromPb(com.google.datastore.v1beta3.Filter filterPb) {
       switch (filterPb.getFilterTypeCase()) {
@@ -192,7 +192,7 @@ public static CompositeFilter and(Filter first, Filter... other) {
     }
 
     @Override
-    protected com.google.datastore.v1beta3.Filter toPb() {
+    com.google.datastore.v1beta3.Filter toPb() {
       com.google.datastore.v1beta3.Filter.Builder filterPb =
           com.google.datastore.v1beta3.Filter.newBuilder();
       com.google.datastore.v1beta3.CompositeFilter.Builder compositeFilterPb =
@@ -239,7 +239,7 @@ private PropertyFilter(String property, Operator operator, Value<?> value) {
       this.value = checkNotNull(value);
     }
 
-    public static PropertyFilter fromPb(com.google.datastore.v1beta3.PropertyFilter propertyFilterPb) {
+    static PropertyFilter fromPb(com.google.datastore.v1beta3.PropertyFilter propertyFilterPb) {
       String property = propertyFilterPb.getProperty().getName();
       Operator operator = Operator.fromPb(propertyFilterPb.getOp());
       Value<?> value = Value.fromPb(propertyFilterPb.getValue());
@@ -443,7 +443,7 @@ public static PropertyFilter isNull(String property) {
     }
 
     @Override
-    protected com.google.datastore.v1beta3.Filter toPb() {
+    com.google.datastore.v1beta3.Filter toPb() {
       com.google.datastore.v1beta3.Filter.Builder filterPb =
           com.google.datastore.v1beta3.Filter.newBuilder();
       com.google.datastore.v1beta3.PropertyFilter.Builder propertyFilterPb =
@@ -531,7 +531,48 @@ static OrderBy fromPb(com.google.datastore.v1beta3.PropertyOrder propertyOrderPb
     }
   }
 
-  static class BaseBuilder<V, B extends BaseBuilder<V, B>> {
+  /**
+   * Interface for StructuredQuery builders.
+   *
+   * @param <V> the type of result the query returns.
+   */
+  public interface Builder<V> {
+    Builder<V> namespace(String namespace);
+
+    Builder<V> kind(String kind);
+
+    Builder<V> startCursor(Cursor startCursor);
+
+    Builder<V> endCursor(Cursor endCursor);
+
+    Builder<V> offset(int offset);
+
+    Builder<V> limit(Integer limit);
+
+    Builder<V> filter(Filter filter);
+
+    Builder<V> clearOrderBy();
+
+    /**
+     * Sets the query's order by clause (clearing any previously specified OrderBy settings).
+     */
+    Builder<V> orderBy(OrderBy orderBy, OrderBy... others);
+
+    /**
+     * Adds settings to the existing order by clause.
+     */
+    Builder<V> addOrderBy(OrderBy orderBy, OrderBy... others);
+
+    StructuredQuery<V> build();
+  }
+
+  /**
+   * Base class for StructuredQuery builders.
+   *
+   * @param <V> the type of result the query returns.
+   * @param <B> the query builder.
+   */
+  abstract static class BuilderImpl<V, B extends BuilderImpl<V, B>> implements Builder<V> {
 
     private final ResultType<V> resultType;
     private String namespace;
@@ -545,69 +586,87 @@ static class BaseBuilder<V, B extends BaseBuilder<V, B>> {
     private int offset;
     private Integer limit;
 
-    BaseBuilder(ResultType<V> resultType) {
+    BuilderImpl(ResultType<V> resultType) {
       this.resultType = resultType;
     }
 
+    BuilderImpl(StructuredQuery<V> query) {
+      this(query.type());
+      namespace = query.namespace();
+      kind = query.kind;
+      projection.addAll(query.projection);
+      filter = query.filter;
+      distinctOn.addAll(query.distinctOn);
+      orderBy.addAll(query.orderBy);
+      startCursor = query.startCursor;
+      endCursor = query.endCursor;
+      offset = query.offset;
+      limit = query.limit;
+    }
+
     @SuppressWarnings("unchecked")
     B self() {
       return (B) this;
     }
 
+    @Override
     public B namespace(String namespace) {
       this.namespace = namespace;
       return self();
     }
 
+    @Override
     public B kind(String kind) {
       this.kind = kind;
       return self();
     }
 
+    @Override
     public B startCursor(Cursor startCursor) {
       this.startCursor = startCursor;
       return self();
     }
 
+    @Override
     public B endCursor(Cursor endCursor) {
       this.endCursor = endCursor;
       return self();
     }
 
+    @Override
     public B offset(int offset) {
       Preconditions.checkArgument(offset >= 0, "offset must not be negative");
       this.offset = offset;
       return self();
     }
 
+    @Override
     public B limit(Integer limit) {
       Preconditions.checkArgument(limit == null || limit > 0, "limit must be positive");
       this.limit = limit;
       return self();
     }
 
+    @Override
     public B filter(Filter filter) {
       this.filter = filter;
       return self();
     }
 
+    @Override
     public B clearOrderBy() {
       orderBy.clear();
       return self();
     }
 
-    /**
-     * Sets the query's order by clause (clearing any previously specified OrderBy settings).
-     */
+    @Override
     public B orderBy(OrderBy orderBy, OrderBy... others) {
       clearOrderBy();
       addOrderBy(orderBy, others);
       return self();
     }
 
-    /**
-     * Adds settings to the existing order by clause.
-     */
+    @Override
     public B addOrderBy(OrderBy orderBy, OrderBy... others) {
       this.orderBy.add(orderBy);
       Collections.addAll(this.orderBy, others);
@@ -680,117 +739,9 @@ B mergeFrom(com.google.datastore.v1beta3.Query queryPb) {
       }
       return self();
     }
-
-    public StructuredQuery<V> build() {
-      return new StructuredQuery<>(this);
-    }
-  }
-
-  static final class Builder<V> extends BaseBuilder<V, Builder<V>> {
-
-    Builder(ResultType<V> resultType) {
-      super(resultType);
-    }
-  }
-
-  /**
-   * A StructuredQuery builder for queries that return Entity results.
-   */
-  public static final class EntityQueryBuilder extends BaseBuilder<Entity, EntityQueryBuilder> {
-
-    EntityQueryBuilder() {
-      super(ResultType.ENTITY);
-    }
-
-    @Override
-    public StructuredQuery<Entity> build() {
-      return new StructuredQuery<>(this);
-    }
-  }
-
-  /**
-   * A StructuredQuery builder for queries that return Key results.
-   */
-  public static final class KeyQueryBuilder extends BaseBuilder<Key, KeyQueryBuilder> {
-
-    KeyQueryBuilder() {
-      super(ResultType.KEY);
-      projection(KEY_PROPERTY_NAME);
-    }
-
-    @Override
-    protected KeyQueryBuilder mergeFrom(com.google.datastore.v1beta3.Query queryPb) {
-      super.mergeFrom(queryPb);
-      projection(KEY_PROPERTY_NAME);
-      clearDistinctOn();
-      return this;
-    }
-
-    @Override
-    public StructuredQuery<Key> build() {
-      return new StructuredQuery<>(this);
-    }
-  }
-
-  /**
-   * A StructuredQuery builder for projection queries.
-   */
-  public static final class ProjectionEntityQueryBuilder
-      extends BaseBuilder<ProjectionEntity, ProjectionEntityQueryBuilder> {
-
-    ProjectionEntityQueryBuilder() {
-      super(ResultType.PROJECTION_ENTITY);
-    }
-
-    @Override
-    public StructuredQuery<ProjectionEntity> build() {
-      return new StructuredQuery<>(this);
-    }
-
-    @Override
-    public ProjectionEntityQueryBuilder clearProjection() {
-      return super.clearProjection();
-    }
-
-    /**
-     * Sets the query's projection clause (clearing any previously specified Projection settings).
-     */
-    @Override
-    public ProjectionEntityQueryBuilder projection(String projection, String... others) {
-      return super.projection(projection, others);
-    }
-
-    /**
-     * Adds one or more projections to the existing projection clause.
-     */
-    @Override
-    public ProjectionEntityQueryBuilder addProjection(String projection, String... others) {
-      return super.addProjection(projection, others);
-    }
-
-    @Override
-    public ProjectionEntityQueryBuilder clearDistinctOn() {
-      return super.clearDistinctOn();
-    }
-
-    /**
-     * Sets the query's group by clause (clearing any previously specified GroupBy settings).
-     */
-    @Override
-    public ProjectionEntityQueryBuilder distinctOn(String property, String... others) {
-      return super.distinctOn(property, others);
-    }
-
-    /**
-     * Adds one or more properties to the existing group by clause.
-     */
-    @Override
-    public ProjectionEntityQueryBuilder addDistinctOn(String property, String... others) {
-      return super.addDistinctOn(property, others);
-    }
   }
 
-  StructuredQuery(BaseBuilder<V, ?> builder) {
+  StructuredQuery(BuilderImpl<V, ?> builder) {
     super(builder.resultType, builder.namespace);
     kind = builder.kind;
     projection = ImmutableList.copyOf(builder.projection);
@@ -871,15 +822,16 @@ public Integer limit() {
     return limit;
   }
 
+  public abstract Builder<V> toBuilder();
+
   @Override
-  protected void populatePb(com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb) {
+  void populatePb(com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb) {
     requestPb.setQuery(toPb());
   }
 
   @Override
-  protected Query<V> nextQuery(com.google.datastore.v1beta3.RunQueryResponse responsePb) {
-    Builder<V> builder = new Builder<>(type());
-    builder.mergeFrom(toPb());
+  StructuredQuery<V> nextQuery(com.google.datastore.v1beta3.RunQueryResponse responsePb) {
+    Builder<V> builder = toBuilder();
     builder.startCursor(new Cursor(responsePb.getBatch().getEndCursor()));
     if (offset > 0 && responsePb.getBatch().getSkippedResults() < offset) {
       builder.offset(offset - responsePb.getBatch().getSkippedResults());
@@ -893,7 +845,7 @@ protected Query<V> nextQuery(com.google.datastore.v1beta3.RunQueryResponse respo
   }
 
   @Override
-  protected com.google.datastore.v1beta3.Query toPb() {
+  com.google.datastore.v1beta3.Query toPb() {
     com.google.datastore.v1beta3.Query.Builder queryPb =
         com.google.datastore.v1beta3.Query.newBuilder();
     if (kind != null) {
@@ -932,22 +884,21 @@ protected com.google.datastore.v1beta3.Query toPb() {
   }
 
   @Override
-  protected Object fromPb(ResultType<V> resultType, String namespace, byte[] bytesPb)
+  Object fromPb(ResultType<V> resultType, String namespace, byte[] bytesPb)
       throws InvalidProtocolBufferException {
     return fromPb(resultType, namespace, com.google.datastore.v1beta3.Query.parseFrom(bytesPb));
   }
 
   @SuppressWarnings("unchecked")
-  static StructuredQuery<?> fromPb(
-      ResultType<?> resultType, String namespace,
+  static StructuredQuery<?> fromPb(ResultType<?> resultType, String namespace,
       com.google.datastore.v1beta3.Query queryPb) {
-    BaseBuilder<?, ?> builder;
+    BuilderImpl<?, ?> builder;
     if (resultType.equals(ResultType.ENTITY)) {
-      builder = new EntityQueryBuilder();
+      builder = new EntityQuery.Builder();
    } else if (resultType.equals(ResultType.KEY)) {
-      builder = new KeyQueryBuilder();
+      builder = new KeyQuery.Builder();
    } else {
-      builder = new ProjectionEntityQueryBuilder();
+      builder = new ProjectionEntityQuery.Builder();
    }
    return (StructuredQuery<?>) builder.namespace(namespace).mergeFrom(queryPb).build();
  }
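Since `StructuredQuery` is now abstract and built only through the `Builder` interface above, a typical filtered query reads as follows. This is a sketch assuming the usual `PropertyFilter.eq`/`OrderBy.desc` static factories on `StructuredQuery` (property names and values are placeholders):

```java
import com.google.gcloud.datastore.StructuredQuery.OrderBy;
import com.google.gcloud.datastore.StructuredQuery.PropertyFilter;

StructuredQuery<Entity> query = Query.entityQueryBuilder()
    .kind("Task")
    .filter(PropertyFilter.eq("done", false))
    .orderBy(OrderBy.desc("priority"))
    .limit(10)
    .build();
```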
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java
index 9d676bc68a8c..8089c0130f5d 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Transaction.java
@@ -47,7 +47,8 @@
  * }
  * }
 
- * @see <a href="https://cloud.google.com/datastore/docs/concepts/transactions">Google Cloud Datastore transactions</a>
+ * @see <a href="https://cloud.google.com/datastore/docs/concepts/transactions">
+ *     Google Cloud Datastore transactions</a>
  *
  */
 public interface Transaction extends DatastoreBatchWriter, DatastoreReaderWriter {
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java
index 9e4aae0fb6f5..4f3d39f1dae1 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/Value.java
@@ -63,7 +63,7 @@ public final B fromProto(com.google.datastore.v1beta3.Value proto) {
     @SuppressWarnings("deprecation")
     @Override
     public final com.google.datastore.v1beta3.Value toProto(P value) {
-      com.google.datastore.v1beta3.Value.Builder builder = 
+      com.google.datastore.v1beta3.Value.Builder builder =
           com.google.datastore.v1beta3.Value.newBuilder();
       builder.setExcludeFromIndexes(value.excludeFromIndexes());
       builder.setMeaning(value.meaning());
@@ -193,19 +193,19 @@ public boolean equals(Object obj) {
 
   @Override
   @SuppressWarnings("unchecked")
-  protected com.google.datastore.v1beta3.Value toPb() {
+  com.google.datastore.v1beta3.Value toPb() {
     return type().getMarshaller().toProto(this);
   }
 
   static Value<?> fromPb(com.google.datastore.v1beta3.Value proto) {
     ValueTypeCase descriptorId = proto.getValueTypeCase();
     ValueType valueType = ValueType.getByDescriptorId(descriptorId.getNumber());
-    return valueType == null ? RawValue.MARSHALLER.fromProto(proto).build() 
+    return valueType == null ? RawValue.MARSHALLER.fromProto(proto).build()
         : valueType.getMarshaller().fromProto(proto).build();
   }
 
   @Override
-  protected Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException {
+  Object fromPb(byte[] bytesPb) throws InvalidProtocolBufferException {
     return fromPb(com.google.datastore.v1beta3.Value.parseFrom(bytesPb));
   }
 }
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java
index 5094062e1ec8..236118a18620 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/ValueBuilder.java
@@ -18,6 +18,10 @@
 
 /**
  * A common interface for Value builders.
+ *
+ * @param <V> the data type that the {@code Value} object holds.
+ * @param <P> the value type.
+ * @param <B> the value type's associated builder.
  */
 public interface ValueBuilder<V, P extends Value<V>, B extends ValueBuilder<V, P, B>> {
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java
index f63be902c319..1710ab0b4d33 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/package-info.java
@@ -17,10 +17,9 @@
 /**
  * A client to the Google Cloud Datastore.
  *
- * <p>A simple usage example:
+ * <p>Here's a simple usage example for using gcloud-java from App/Compute Engine:
  * <pre> {@code
- * DatastoreOptions options = DatastoreOptions.builder().projectId(PROJECT_ID).build();
- * Datastore datastore = DatastoreFactory.instance().get(options);
+ * Datastore datastore = DatastoreOptions.defaultInstance().service();
  * KeyFactory keyFactory = datastore.newKeyFactory().kind(kind);
  * Key key = keyFactory.newKey(keyName);
  * Entity entity = datastore.get(key);
@@ -47,6 +46,11 @@
  * }
  * } </pre>
  *
+ * <p>When using gcloud-java from outside of App/Compute Engine, you have to <a
+ * href="https://github.com/GoogleCloudPlatform/gcloud-java#specifying-a-project-id">specify a
+ * project ID</a> and <a
+ * href="https://github.com/GoogleCloudPlatform/gcloud-java#authentication">provide
+ * credentials</a>.
  * @see <a href="https://cloud.google.com/datastore/">Google Cloud Datastore</a>
  */
 package com.google.gcloud.datastore;
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java
index def42fdb24f6..a4b8fba69d56 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/LocalGcdHelper.java
@@ -524,7 +524,8 @@ private static void extractFile(ZipInputStream zipIn, File filePath) throws IOEx
     }
   }
 
-  public static void sendQuitRequest(int port) {
+  public static boolean sendQuitRequest(int port) {
+    StringBuilder result = new StringBuilder();
     try {
       URL url = new URL("http", "localhost", port, "/_ah/admin/quit");
       HttpURLConnection con = (HttpURLConnection) url.openConnection();
@@ -535,12 +536,14 @@ public static void sendQuitRequest(int port) {
       out.write("".getBytes());
       out.flush();
       InputStream in = con.getInputStream();
-      while (in.read() != -1) {
-        // consume input
+      int currByte = 0;
+      while ((currByte = in.read()) != -1) {
+        result.append(((char) currByte));
       }
     } catch (IOException ignore) {
       // ignore
     }
+    return result.toString().startsWith("Shutting down local server");
   }
 
   public void stop() throws IOException, InterruptedException {
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/package-info.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/package-info.java
index 9d20b3c7ffd8..d03c9d85cd09 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/package-info.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/datastore/testing/package-info.java
@@ -25,9 +25,9 @@
  *  .projectId(PROJECT_ID)
  *  .host("localhost:8080")
  *  .build();
- * Datastore localDatastore = DatastoreFactory.instance().get(options);
+ * Datastore localDatastore = options.service();
  * } </pre>
- * 
+ *
  * <p>After the test:
 * <pre> {@code
  * gcdHelper.stop();
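For orientation, the boolean now returned by `sendQuitRequest` lets a test harness verify that the emulator actually acknowledged shutdown. A hedged sketch, assuming the `LocalGcdHelper.start(projectId, port)` factory that the tests below rely on (project ID and port are placeholders):

```java
LocalGcdHelper gcdHelper = LocalGcdHelper.start("my-project-id", 8080);
try {
  // ... exercise code against the local Datastore on localhost:8080 ...
} finally {
  if (!LocalGcdHelper.sendQuitRequest(8080)) {
    gcdHelper.stop();  // fall back to a hard stop if the quit wasn't acknowledged
  }
}
```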
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java
index 329623a565f6..14aad0d9b2e4 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java
@@ -13,99 +13,31 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package com.google.gcloud.spi;
 
+import com.google.gcloud.datastore.DatastoreException;
+
 /**
  * Provides access to the remote Datastore service.
  */
 public interface DatastoreRpc {
 
-  public class DatastoreRpcException extends Exception {
-
-    /**
-     * The reason for the exception.
-     *
-     * @see Google
-     *     Cloud Datastore error codes
-     */
-    public enum Reason {
-
-      ABORTED(true, "Request aborted", 409),
-      DEADLINE_EXCEEDED(true, "Deadline exceeded", 403),
-      FAILED_PRECONDITION(false, "Invalid request", 412),
-      INTERNAL(false, "Server returned an error", 500),
-      INVALID_ARGUMENT(false, "Request parameter has an invalid value", 400),
-      PERMISSION_DENIED(false, "Unauthorized request", 403),
-      RESOURCE_EXHAUSTED(false, "Quota exceeded", 402),
-      UNAVAILABLE(true, "Could not reach service", 503);
-
-      private final boolean retryable;
-      private final String description;
-      private final int httpStatus;
-
-      private Reason(boolean retryable, String description, int httpStatus) {
-        this.retryable = retryable;
-        this.description = description;
-        this.httpStatus = httpStatus;
-      }
-
-      public boolean retryable() {
-        return retryable;
-      }
-
-      public String description() {
-        return description;
-      }
-
-      public int httpStatus() {
-        return httpStatus;
-      }
-    }
-
-    private final String reason;
-    private final int httpStatus;
-    private final boolean retryable;
-
-    public DatastoreRpcException(Reason reason) {
-      this(reason.name(), reason.httpStatus, reason.retryable, reason.description);
-    }
-
-    public DatastoreRpcException(String reason, int httpStatus, boolean retryable, String message) {
-      super(message);
-      this.reason = reason;
-      this.httpStatus = httpStatus;
-      this.retryable = retryable;
-    }
-
-    public String reason() {
-      return reason;
-    }
-
-    public int httpStatus() {
-      return httpStatus;
-    }
-
-    public boolean retryable() {
-      return retryable;
-    }
-  }
-
   com.google.datastore.v1beta3.AllocateIdsResponse allocateIds(
-      com.google.datastore.v1beta3.AllocateIdsRequest request) throws DatastoreRpcException;
+      com.google.datastore.v1beta3.AllocateIdsRequest request) throws DatastoreException;
 
   com.google.datastore.v1beta3.BeginTransactionResponse beginTransaction(
-      com.google.datastore.v1beta3.BeginTransactionRequest request)
-      throws DatastoreRpcException;
+      com.google.datastore.v1beta3.BeginTransactionRequest request) throws DatastoreException;
 
   com.google.datastore.v1beta3.CommitResponse commit(
-      com.google.datastore.v1beta3.CommitRequest request) throws DatastoreRpcException;
+      com.google.datastore.v1beta3.CommitRequest request) throws DatastoreException;
 
   com.google.datastore.v1beta3.LookupResponse lookup(
-      com.google.datastore.v1beta3.LookupRequest request) throws DatastoreRpcException;
+      com.google.datastore.v1beta3.LookupRequest request) throws DatastoreException;
 
   com.google.datastore.v1beta3.RollbackResponse rollback(
-      com.google.datastore.v1beta3.RollbackRequest request) throws DatastoreRpcException;
+      com.google.datastore.v1beta3.RollbackRequest request) throws DatastoreException;
 
   com.google.datastore.v1beta3.RunQueryResponse runQuery(
-      com.google.datastore.v1beta3.RunQueryRequest request) throws DatastoreRpcException;
+      com.google.datastore.v1beta3.RunQueryRequest request) throws DatastoreException;
 }
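Replacing the checked `DatastoreRpcException` with `DatastoreException` means callers now deal with a single exception type carrying an HTTP-style code, a reason string, and a retryable flag. A minimal sketch of how a caller might branch on it (the `rpc` and `request` variables are placeholders):

```java
try {
  com.google.datastore.v1beta3.LookupResponse response = rpc.lookup(request);
  // ... use response ...
} catch (DatastoreException e) {
  if (e.retryable()) {
    // e.g. ABORTED, DEADLINE_EXCEEDED, UNAVAILABLE; safe to retry the lookup
  } else {
    throw e;
  }
}
```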
diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java
index 9ec1f25746ed..6f8dc48f8a5e 100644
--- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java
+++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java
@@ -16,37 +16,19 @@
 
 package com.google.gcloud.spi;
 
-import com.google.common.collect.ImmutableMap;
+import com.google.gcloud.datastore.DatastoreException;
 import com.google.gcloud.datastore.DatastoreOptions;
-import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason;
 
+import java.io.IOException;
 import java.net.InetAddress;
-import java.net.MalformedURLException;
 import java.net.URL;
-import java.net.UnknownHostException;
-import java.util.HashMap;
-import java.util.Map;
 
 public class DefaultDatastoreRpc implements DatastoreRpc {
 
   private final com.google.datastore.v1beta3.client.Datastore client;
 
-  private static final ImmutableMap<String, Reason> STR_TO_REASON;
-  private static final ImmutableMap<Integer, Reason> HTTP_STATUS_TO_REASON;
-
-  static {
-    ImmutableMap.Builder<String, Reason> builder = ImmutableMap.builder();
-    Map<Integer, Reason> httpCodes = new HashMap<>();
-    for (Reason reason : Reason.values()) {
-      builder.put(reason.name(), reason);
-      httpCodes.put(reason.httpStatus(), reason);
-    }
-    STR_TO_REASON = builder.build();
-    HTTP_STATUS_TO_REASON = ImmutableMap.copyOf(httpCodes);
-  }
-
   public DefaultDatastoreRpc(DatastoreOptions options) {
-    com.google.datastore.v1beta3.client.DatastoreOptions.Builder clientBuilder = 
+    com.google.datastore.v1beta3.client.DatastoreOptions.Builder clientBuilder =
         new com.google.datastore.v1beta3.client.DatastoreOptions.Builder()
             .projectId(options.projectId())
             .initializer(options.httpRequestInitializer());
@@ -75,7 +57,7 @@ private static boolean isLocalHost(String host) {
         String normalizedHost = "http://" + removeScheme(host);
         InetAddress hostAddr = InetAddress.getByName(new URL(normalizedHost).getHost());
         return hostAddr.isAnyLocalAddress() || hostAddr.isLoopbackAddress();
-      } catch (UnknownHostException | MalformedURLException e) {
+      } catch (Exception e) {
         // ignore
       }
     }
@@ -93,37 +75,35 @@ private static String removeScheme(String url) {
     return url;
   }
 
-  private static DatastoreRpcException translate(
+  private static DatastoreException translate(
       com.google.datastore.v1beta3.client.DatastoreException exception) {
-    String reasonStr = "";
+    String reason = "";
     if (exception.getCode() != null) {
-      reasonStr = exception.getCode().name();
+      reason = exception.getCode().name();
     }
-    Reason reason = STR_TO_REASON.get(reasonStr);
-    if (reason == null) {
-      reason = HTTP_STATUS_TO_REASON.get(exception.getCode());
+    if (reason.isEmpty()) {
+      if (exception.getCause() instanceof IOException) {
+        return new DatastoreException((IOException) exception.getCause());
+      }
     }
-    return reason != null
-        ? new DatastoreRpcException(reason)
-        : new DatastoreRpcException("Unknown", 
-            exception.getCode().ordinal(), 
-            false, 
-            exception.getMessage());
+    return new DatastoreException(
+        exception.getCode().ordinal(), exception.getMessage(), reason, exception);
   }
 
   @Override
   public com.google.datastore.v1beta3.AllocateIdsResponse allocateIds(
-      com.google.datastore.v1beta3.AllocateIdsRequest request) throws DatastoreRpcException {
+      com.google.datastore.v1beta3.AllocateIdsRequest request) throws DatastoreException {
     try {
       return client.allocateIds(request);
     } catch (com.google.datastore.v1beta3.client.DatastoreException ex) {
+
       throw translate(ex);
     }
   }
 
   @Override
   public com.google.datastore.v1beta3.BeginTransactionResponse beginTransaction(
-      com.google.datastore.v1beta3.BeginTransactionRequest request) throws DatastoreRpcException {
+      com.google.datastore.v1beta3.BeginTransactionRequest request) throws DatastoreException {
     try {
       return client.beginTransaction(request);
     } catch (com.google.datastore.v1beta3.client.DatastoreException ex) {
@@ -133,7 +113,7 @@ public com.google.datastore.v1beta3.BeginTransactionResponse beginTransaction(
 
   @Override
   public com.google.datastore.v1beta3.CommitResponse commit(
-      com.google.datastore.v1beta3.CommitRequest request) throws DatastoreRpcException {
+      com.google.datastore.v1beta3.CommitRequest request) throws DatastoreException {
     try {
       return client.commit(request);
     } catch (com.google.datastore.v1beta3.client.DatastoreException ex) {
@@ -143,7 +123,7 @@ public com.google.datastore.v1beta3.CommitResponse commit(
 
   @Override
   public com.google.datastore.v1beta3.LookupResponse lookup(
-      com.google.datastore.v1beta3.LookupRequest request) throws DatastoreRpcException {
+      com.google.datastore.v1beta3.LookupRequest request) throws DatastoreException {
     try {
       return client.lookup(request);
     } catch (com.google.datastore.v1beta3.client.DatastoreException ex) {
@@ -153,7 +133,7 @@ public com.google.datastore.v1beta3.LookupResponse lookup(
 
   @Override
   public com.google.datastore.v1beta3.RollbackResponse rollback(
-      com.google.datastore.v1beta3.RollbackRequest request) throws DatastoreRpcException {
+      com.google.datastore.v1beta3.RollbackRequest request) throws DatastoreException {
     try {
       return client.rollback(request);
     } catch (com.google.datastore.v1beta3.client.DatastoreException ex) {
@@ -163,7 +143,7 @@ public com.google.datastore.v1beta3.RollbackResponse rollback(
 
   @Override
   public com.google.datastore.v1beta3.RunQueryResponse runQuery(
-      com.google.datastore.v1beta3.RunQueryRequest request) throws DatastoreRpcException {
+      com.google.datastore.v1beta3.RunQueryRequest request) throws DatastoreException {
     try {
       return client.runQuery(request);
     } catch (com.google.datastore.v1beta3.client.DatastoreException ex) {
@@ -171,4 +151,3 @@ public com.google.datastore.v1beta3.RunQueryResponse runQuery(
     }
   }
 }
-
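The rewritten `translate` above keeps the v1beta3 client's code name as the `reason` and falls back to the `DatastoreException(IOException)` constructor when no code is present. Observable behavior, per the tests that follow (`rpc` and `request` are placeholders):

```java
try {
  rpc.commit(request);
} catch (DatastoreException e) {
  // reason() mirrors the client code name ("UNAVAILABLE", "ABORTED", ...);
  // for IOException-caused failures, reason() and getMessage() are null.
  boolean shouldRetry = e.retryable();
}
```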
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java
index c8773243ed69..ee1866e2a906 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/BaseKeyTest.java
@@ -115,10 +115,10 @@ public void testAncestors() throws Exception {
     BaseKey key = builder.build();
     assertTrue(key.ancestors().isEmpty());
    List<PathElement> path = new ArrayList<>();
-    path.add(PathElement.of("p1","v1"));
+    path.add(PathElement.of("p1", "v1"));
     key = builder.ancestors(path.get(0)).build();
     assertEquals(path, key.ancestors());
-    path.add(PathElement.of("p2","v2"));
+    path.add(PathElement.of("p2", "v2"));
     key = builder.ancestors(path.get(1)).build();
     assertEquals(path, key.ancestors());
   }
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java
index a64a3531c19d..301a863476b6 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreExceptionTest.java
@@ -16,39 +16,80 @@
 
 package com.google.gcloud.datastore;
 
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
-import com.google.gcloud.datastore.DatastoreException.Code;
-import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException;
-import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason;
+import com.google.gcloud.BaseServiceException;
+import com.google.gcloud.RetryHelper;
 
 import org.junit.Test;
 
+import java.io.IOException;
+import java.net.SocketTimeoutException;
+
 public class DatastoreExceptionTest {
 
   @Test
-  public void testCode() throws Exception {
-    for (Reason reason : Reason.values()) {
-      Code code = Code.valueOf(reason.name());
-      assertEquals(reason.retryable(), code.retryable());
-      assertEquals(reason.description(), code.description());
-      assertEquals(reason.httpStatus(), code.httpStatus());
-    }
+  public void testDatastoreException() throws Exception {
+    DatastoreException exception = new DatastoreException(10, "message", "ABORTED");
+    assertEquals(10, exception.code());
+    assertEquals("ABORTED", exception.reason());
+    assertEquals("message", exception.getMessage());
+    assertTrue(exception.retryable());
+    assertTrue(exception.idempotent());
+
+    exception = new DatastoreException(4, "message", "DEADLINE_EXCEEDED");
+    assertEquals(4, exception.code());
+    assertEquals("DEADLINE_EXCEEDED", exception.reason());
+    assertEquals("message", exception.getMessage());
+    assertTrue(exception.retryable());
+    assertTrue(exception.idempotent());
+
+    exception = new DatastoreException(14, "message", "UNAVAILABLE");
+    assertEquals(14, exception.code());
+    assertEquals("UNAVAILABLE", exception.reason());
+    assertEquals("message", exception.getMessage());
+    assertTrue(exception.retryable());
+    assertTrue(exception.idempotent());
+
+    exception = new DatastoreException(2, "message", "INTERNAL");
+    assertEquals(2, exception.code());
+    assertEquals("INTERNAL", exception.reason());
+    assertEquals("message", exception.getMessage());
+    assertFalse(exception.retryable());
+    assertTrue(exception.idempotent());
+
+    IOException cause = new SocketTimeoutException();
+    exception = new DatastoreException(cause);
+    assertNull(exception.reason());
+    assertNull(exception.getMessage());
+    assertTrue(exception.retryable());
+    assertTrue(exception.idempotent());
 
-    DatastoreException exception = new DatastoreException(Code.ABORTED, "bla");
-    assertEquals(Code.ABORTED, exception.code());
   }
 
   @Test
   public void testTranslateAndThrow() throws Exception {
-    for (Reason reason : Reason.values()) {
-      try {
-        DatastoreException.translateAndThrow(new DatastoreRpcException(reason));
-        fail("Exception expected");
-      } catch (DatastoreException ex) {
-        assertEquals(reason.name(), ex.code().name());
-      }
+    DatastoreException cause = new DatastoreException(14, "message", "UNAVAILABLE");
+    RetryHelper.RetryHelperException exceptionMock = createMock(RetryHelper.RetryHelperException.class);
+    expect(exceptionMock.getCause()).andReturn(cause).times(2);
+    replay(exceptionMock);
+    try {
+      DatastoreException.translateAndThrow(exceptionMock);
+    } catch (BaseServiceException ex) {
+      assertEquals(14, ex.code());
+      assertEquals("message", ex.getMessage());
+      assertTrue(ex.retryable());
+      assertTrue(ex.idempotent());
+    } finally {
+      verify(exceptionMock);
     }
   }
 
@@ -58,7 +99,7 @@ public void testThrowInvalidRequest() throws Exception {
       DatastoreException.throwInvalidRequest("message %s %d", "a", 1);
       fail("Exception expected");
     } catch (DatastoreException ex) {
-      assertEquals(Code.FAILED_PRECONDITION, ex.code());
+      assertEquals("FAILED_PRECONDITION", ex.reason());
       assertEquals("message a 1", ex.getMessage());
     }
   }
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java
index 7cbd4c1dd230..c923cc761ce9 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreOptionsTest.java
@@ -29,8 +29,6 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.IOException;
-
 public class DatastoreOptionsTest {
 
   private static final String PROJECT_ID = "project_id";
@@ -40,7 +38,7 @@ public class DatastoreOptionsTest {
   private DatastoreOptions.Builder options;
 
   @Before
-  public void setUp() throws IOException, InterruptedException {
+  public void setUp() {
     datastoreRpcFactory = EasyMock.createMock(DatastoreRpcFactory.class);
     datastoreRpc = EasyMock.createMock(DatastoreRpc.class);
     options = DatastoreOptions.builder()
@@ -72,8 +70,7 @@ public void testNamespace() throws Exception {
 
   @Test
   public void testDatastore() throws Exception {
-    assertSame(datastoreRpcFactory, options.build().serviceRpcFactory());
-    assertSame(datastoreRpc, options.build().datastoreRpc());
+    assertSame(datastoreRpc, options.build().rpc());
   }
 
   @Test
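These assertions reflect the new wiring: the service and its RPC now come straight off `DatastoreOptions` rather than a `DatastoreFactory` singleton. A minimal sketch of production-style construction (the project ID is a placeholder):

```java
DatastoreOptions options = DatastoreOptions.builder()
    .projectId("my-project-id")
    .build();
Datastore datastore = options.service();
```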
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java
index 4725e0b6c880..002e5f6df04f 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/DatastoreTest.java
@@ -32,9 +32,8 @@
 import com.google.gcloud.datastore.StructuredQuery.PropertyFilter;
 import com.google.gcloud.datastore.testing.LocalGcdHelper;
 import com.google.gcloud.spi.DatastoreRpc;
-import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException;
-import com.google.gcloud.spi.DatastoreRpc.DatastoreRpcException.Reason;
 import com.google.gcloud.spi.DatastoreRpcFactory;
+import com.google.protobuf.ByteString;
 
 import org.easymock.EasyMock;
 import org.junit.AfterClass;
@@ -49,8 +48,10 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Set;
 
 @RunWith(JUnit4.class)
 public class DatastoreTest {
@@ -121,12 +122,13 @@ public static void beforeClass() throws IOException, InterruptedException {
   }
 
   @Before
-  public void setUp() throws IOException, InterruptedException {
+  public void setUp() {
     options = DatastoreOptions.builder()
         .projectId(PROJECT_ID)
         .host("localhost:" + PORT)
+        .retryParams(RetryParams.noRetries())
         .build();
-    datastore = DatastoreFactory.instance().get(options);
+    datastore = options.service();
    StructuredQuery<Key> query = Query.keyQueryBuilder().build();
    QueryResults<Key> result = datastore.run(query);
     datastore.delete(Iterators.toArray(result, Key.class));
@@ -197,7 +199,7 @@ public void testTransactionWithRead() {
       transaction.commit();
       fail("Expecting a failure");
     } catch (DatastoreException expected) {
-      assertEquals(DatastoreException.Code.ABORTED, expected.code());
+      assertEquals("ABORTED", expected.reason());
     }
   }
 
@@ -225,7 +227,7 @@ public void testTransactionWithQuery() {
       transaction.commit();
       fail("Expecting a failure");
     } catch (DatastoreException expected) {
-      assertEquals(DatastoreException.Code.ABORTED, expected.code());
+      assertEquals("ABORTED", expected.reason());
     }
   }
 
@@ -346,7 +348,8 @@ public void testNewBatch() {
 
   @Test
   public void testRunGqlQueryNoCasting() {
-    Query<Entity> query1 = Query.gqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND1).build();
+    Query<Entity> query1 =
+        Query.gqlQueryBuilder(ResultType.ENTITY, "select * from " + KIND1).build();
     QueryResults<Entity> results1 = datastore.run(query1);
     assertTrue(results1.hasNext());
     assertEquals(ENTITY1, results1.next());
@@ -417,7 +420,7 @@ public void testRunGqlQueryWithCasting() {
   }
 
   @Test
-  public void testGqlQueryPagination() throws DatastoreRpcException {
+  public void testGqlQueryPagination() throws DatastoreException {
     DatastoreRpcFactory rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class);
     DatastoreRpc rpcMock = EasyMock.createStrictMock(DatastoreRpc.class);
     EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class)))
@@ -431,12 +434,11 @@ public void testGqlQueryPagination() throws DatastoreRpcException {
             .andReturn(responses.get(i));
     }
     EasyMock.replay(rpcFactoryMock, rpcMock);
-    DatastoreOptions options =
-        this.options.toBuilder()
-            .retryParams(RetryParams.getDefaultInstance())
-            .serviceRpcFactory(rpcFactoryMock)
-            .build();
-    Datastore mockDatastore = DatastoreFactory.instance().get(options);
+    DatastoreOptions options = this.options.toBuilder()
+        .retryParams(RetryParams.defaultInstance())
+        .serviceRpcFactory(rpcFactoryMock)
+        .build();
+    Datastore mockDatastore = options.service();
    QueryResults<Key> results =
         mockDatastore.run(Query.gqlQueryBuilder(ResultType.KEY, "select __key__ from *").build());
     int count = 0;
@@ -493,7 +495,7 @@ public void testRunStructuredQuery() {
   }
 
   @Test
-  public void testStructuredQueryPagination() throws DatastoreRpcException {
+  public void testStructuredQueryPagination() throws DatastoreException {
     DatastoreRpcFactory rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class);
     DatastoreRpc rpcMock = EasyMock.createStrictMock(DatastoreRpc.class);
     EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class)))
@@ -507,12 +509,11 @@ public void testStructuredQueryPagination() throws DatastoreRpcException {
             .andReturn(responses.get(i));
     }
     EasyMock.replay(rpcFactoryMock, rpcMock);
-    DatastoreOptions options =
-        this.options.toBuilder()
-            .retryParams(RetryParams.getDefaultInstance())
-            .serviceRpcFactory(rpcFactoryMock)
-            .build();
-    Datastore mockDatastore = DatastoreFactory.instance().get(options);
+    DatastoreOptions options = this.options.toBuilder()
+        .retryParams(RetryParams.defaultInstance())
+        .serviceRpcFactory(rpcFactoryMock)
+        .build();
+    Datastore mockDatastore = options.service();
    QueryResults<Key> results = mockDatastore.run(Query.keyQueryBuilder().build());
     int count = 0;
     while (results.hasNext()) {
@@ -578,6 +579,128 @@ private List buildResponsesForQue
     return responses;
   }
 
+  @Test
+  public void testQueryPaginationWithLimit() throws DatastoreException {
+    DatastoreRpcFactory rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class);
+    DatastoreRpc rpcMock = EasyMock.createStrictMock(DatastoreRpc.class);
+    EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class)))
+        .andReturn(rpcMock);
+    List<com.google.datastore.v1beta3.RunQueryResponse> responses =
+        buildResponsesForQueryPaginationWithLimit();
+    for (int i = 0; i < responses.size(); i++) {
+      EasyMock.expect(
+              rpcMock.runQuery(
+                  EasyMock.anyObject(com.google.datastore.v1beta3.RunQueryRequest.class)))
+          .andReturn(responses.get(i));
+    }
+    EasyMock.replay(rpcFactoryMock, rpcMock);
+    Datastore mockDatastore = options.toBuilder()
+        .retryParams(RetryParams.defaultInstance())
+        .serviceRpcFactory(rpcFactoryMock)
+        .build()
+        .service();
+    int limit = 2;
+    int totalCount = 0;
+    StructuredQuery<Entity> query = Query.entityQueryBuilder().limit(limit).build();
+    while (true) {
+      QueryResults<Entity> results = mockDatastore.run(query);
+      int resultCount = 0;
+      while (results.hasNext()) {
+        results.next();
+        resultCount++;
+        totalCount++;
+      }
+      if (resultCount < limit) {
+        break;
+      }
+      query = query.toBuilder().startCursor(results.cursorAfter()).build();
+    }
+    assertEquals(5, totalCount);
+    EasyMock.verify(rpcFactoryMock, rpcMock);
+  }
+
+  private List<com.google.datastore.v1beta3.RunQueryResponse>
+      buildResponsesForQueryPaginationWithLimit() {
+    Entity entity4 = Entity.builder(KEY4).set("value", StringValue.of("value")).build();
+    Entity entity5 = Entity.builder(KEY5).set("value", "value").build();
+    datastore.add(ENTITY3, entity4, entity5);
+    List<com.google.datastore.v1beta3.RunQueryResponse> responses = new ArrayList<>();
+    Query<Entity> query = Query.entityQueryBuilder().build();
+    com.google.datastore.v1beta3.RunQueryRequest.Builder requestPb =
+        com.google.datastore.v1beta3.RunQueryRequest.newBuilder();
+    query.populatePb(requestPb);
+    com.google.datastore.v1beta3.QueryResultBatch queryResultBatchPb =
+        com.google.datastore.v1beta3.RunQueryResponse.newBuilder()
+            .mergeFrom(((DatastoreImpl) datastore).runQuery(requestPb.build()))
+            .getBatch();
+    com.google.datastore.v1beta3.QueryResultBatch queryResultBatchPb1 =
+        com.google.datastore.v1beta3.QueryResultBatch.newBuilder()
+            .mergeFrom(queryResultBatchPb)
+            .setMoreResults(
+                com.google.datastore.v1beta3.QueryResultBatch.MoreResultsType.NOT_FINISHED)
+            .clearEntityResults()
+            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(0, 1))
+            .setEndCursor(queryResultBatchPb.getEntityResultsList().get(0).getCursor())
+            .build();
+    responses.add(
+        com.google.datastore.v1beta3.RunQueryResponse.newBuilder()
+            .setBatch(queryResultBatchPb1)
+            .build());
+    com.google.datastore.v1beta3.QueryResultBatch queryResultBatchPb2 =
+        com.google.datastore.v1beta3.QueryResultBatch.newBuilder()
+            .mergeFrom(queryResultBatchPb)
+            .setMoreResults(
+                com.google.datastore.v1beta3.QueryResultBatch.MoreResultsType
+                    .MORE_RESULTS_AFTER_LIMIT)
+            .clearEntityResults()
+            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(1, 2))
+            .setEndCursor(
+                ByteString.copyFrom(new byte[] {(byte) 0x80})) // test invalid UTF-8 string
+            .build();
+    responses.add(
+        com.google.datastore.v1beta3.RunQueryResponse.newBuilder()
+            .setBatch(queryResultBatchPb2)
+            .build());
+    com.google.datastore.v1beta3.QueryResultBatch queryResultBatchPb3 =
+        com.google.datastore.v1beta3.QueryResultBatch.newBuilder()
+            .mergeFrom(queryResultBatchPb)
+            .setMoreResults(
+                com.google.datastore.v1beta3.QueryResultBatch.MoreResultsType
+                    .MORE_RESULTS_AFTER_LIMIT)
+            .clearEntityResults()
+            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(2, 4))
+            .setEndCursor(queryResultBatchPb.getEntityResultsList().get(3).getCursor())
+            .build();
+    responses.add(
+        com.google.datastore.v1beta3.RunQueryResponse.newBuilder()
+            .setBatch(queryResultBatchPb3)
+            .build());
+    com.google.datastore.v1beta3.QueryResultBatch queryResultBatchPb4 =
+        com.google.datastore.v1beta3.QueryResultBatch.newBuilder()
+            .mergeFrom(queryResultBatchPb)
+            .setMoreResults(
+                com.google.datastore.v1beta3.QueryResultBatch.MoreResultsType.NO_MORE_RESULTS)
+            .clearEntityResults()
+            .addAllEntityResults(queryResultBatchPb.getEntityResultsList().subList(4, 5))
+            .setEndCursor(queryResultBatchPb.getEntityResultsList().get(4).getCursor())
+            .build();
+    responses.add(
+        com.google.datastore.v1beta3.RunQueryResponse.newBuilder()
+            .setBatch(queryResultBatchPb4)
+            .build());
+    return responses;
+  }
+
+  @Test
+  public void testToUrlSafe() {
+    byte[][] invalidUtf8 =
+        new byte[][] {{(byte) 0xfe}, {(byte) 0xc1, (byte) 0xbf}, {(byte) 0xc0}, {(byte) 0x80}};
+    for (byte[] bytes : invalidUtf8) {
+      assertFalse(ByteString.copyFrom(bytes).isValidUtf8());
+      Cursor cursor = new Cursor(ByteString.copyFrom(bytes));
+      assertEquals(cursor, Cursor.fromUrlSafe(cursor.toUrlSafe()));
+    }
+  }
+
   @Test
   public void testAllocateId() {
     KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND1);
@@ -643,7 +766,7 @@ public void testGet() {
   }
 
   @Test
-  public void testGetArray() {
+  public void testGetArrayNoDeferredResults() {
     datastore.put(ENTITY3);
    Iterator<Entity> result =
         datastore.fetch(KEY1, Key.builder(KEY1).name("bla").build(), KEY2, KEY3).iterator();
@@ -670,7 +793,92 @@ public void testGetArray() {
       // expected - no such property
     }
     assertFalse(result.hasNext());
-    // TODO(ozarov): construct a test to verify more results
+  }
+
+  @Test
+  public void testGetArrayDeferredResults() throws DatastoreException {
+    Set<Key> requestedKeys = new HashSet<>();
+    requestedKeys.add(KEY1);
+    requestedKeys.add(KEY2);
+    requestedKeys.add(KEY3);
+    requestedKeys.add(KEY4);
+    requestedKeys.add(KEY5);
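+    // The mocked RPC defers KEY3, KEY4 and KEY5 (see createDatastoreForDeferredLookup), so
+    // get() must transparently issue follow-up lookups until every key is resolved.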
+    Iterator<Entity> iter =
+        createDatastoreForDeferredLookup().get(KEY1, KEY2, KEY3, KEY4, KEY5);
+    Set<Key> keysOfFoundEntities = new HashSet<>();
+    while (iter.hasNext()) {
+      keysOfFoundEntities.add(iter.next().key());
+    }
+    assertEquals(requestedKeys, keysOfFoundEntities);
+  }
+
+  @Test
+  public void testFetchArrayDeferredResults() throws DatastoreException {
+    List<Entity> foundEntities =
+        createDatastoreForDeferredLookup().fetch(KEY1, KEY2, KEY3, KEY4, KEY5);
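+    // fetch() must return entities in the order of the requested keys, even though some of
+    // them are only resolved by follow-up lookups.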
+    assertEquals(5, foundEntities.size());
+    assertEquals(KEY1, foundEntities.get(0).key());
+    assertEquals(KEY2, foundEntities.get(1).key());
+    assertEquals(KEY3, foundEntities.get(2).key());
+    assertEquals(KEY4, foundEntities.get(3).key());
+    assertEquals(KEY5, foundEntities.get(4).key());
+  }
+
+  private Datastore createDatastoreForDeferredLookup() throws DatastoreException {
+    List<com.google.datastore.v1beta3.Key> keysPb = new ArrayList<>();
+    keysPb.add(KEY1.toPb());
+    keysPb.add(KEY2.toPb());
+    keysPb.add(KEY3.toPb());
+    keysPb.add(KEY4.toPb());
+    keysPb.add(KEY5.toPb());
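+    // Simulate a lookup that needs three round trips: the first response defers KEY3, KEY4
+    // and KEY5, the second still defers KEY5, and the third finally returns it.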
+    List<com.google.datastore.v1beta3.LookupRequest> lookupRequests = new ArrayList<>();
+    lookupRequests.add(
+        com.google.datastore.v1beta3.LookupRequest.newBuilder().addAllKeys(keysPb).build());
+    lookupRequests.add(
+        com.google.datastore.v1beta3.LookupRequest.newBuilder()
+            .addKeys(keysPb.get(2))
+            .addKeys(keysPb.get(3))
+            .addKeys(keysPb.get(4))
+            .build());
+    lookupRequests.add(
+        com.google.datastore.v1beta3.LookupRequest.newBuilder().addKeys(keysPb.get(4)).build());
+    Entity entity4 = Entity.builder(KEY4).set("value", StringValue.of("value")).build();
+    Entity entity5 = Entity.builder(KEY5).set("value", "value").build();
+    List<com.google.datastore.v1beta3.LookupResponse> lookupResponses = new ArrayList<>();
+    lookupResponses.add(
+        com.google.datastore.v1beta3.LookupResponse.newBuilder()
+            .addFound(
+                com.google.datastore.v1beta3.EntityResult.newBuilder().setEntity(ENTITY1.toPb()))
+            .addFound(
+                com.google.datastore.v1beta3.EntityResult.newBuilder().setEntity(ENTITY2.toPb()))
+            .addDeferred(keysPb.get(2))
+            .addDeferred(keysPb.get(3))
+            .addDeferred(keysPb.get(4))
+            .build());
+    lookupResponses.add(
+        com.google.datastore.v1beta3.LookupResponse.newBuilder()
+            .addFound(
+                com.google.datastore.v1beta3.EntityResult.newBuilder().setEntity(ENTITY3.toPb()))
+            .addFound(
+                com.google.datastore.v1beta3.EntityResult.newBuilder().setEntity(entity4.toPb()))
+            .addDeferred(keysPb.get(4))
+            .build());
+    lookupResponses.add(
+        com.google.datastore.v1beta3.LookupResponse.newBuilder()
+            .addFound(
+                com.google.datastore.v1beta3.EntityResult.newBuilder().setEntity(entity5.toPb()))
+            .build());
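+    // Strict mocks also verify that the three lookup requests are issued in exactly this order.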
+    DatastoreRpcFactory rpcFactoryMock = EasyMock.createStrictMock(DatastoreRpcFactory.class);
+    DatastoreRpc rpcMock = EasyMock.createStrictMock(DatastoreRpc.class);
+    EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class)))
+        .andReturn(rpcMock);
+    for (int i = 0; i < lookupRequests.size(); i++) {
+      EasyMock.expect(rpcMock.lookup(lookupRequests.get(i))).andReturn(lookupResponses.get(i));
+    }
+    EasyMock.replay(rpcFactoryMock, rpcMock);
+    DatastoreOptions options =
+        this.options.toBuilder()
+            .retryParams(RetryParams.defaultInstance())
+            .serviceRpcFactory(rpcFactoryMock)
+            .build();
+    return options.service();
   }
 
   @Test
@@ -778,14 +986,14 @@ public void testRetryableException() throws Exception {
     EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class)))
         .andReturn(rpcMock);
     EasyMock.expect(rpcMock.lookup(requestPb))
-        .andThrow(new DatastoreRpc.DatastoreRpcException(Reason.UNAVAILABLE))
+        .andThrow(new DatastoreException(14, "UNAVAILABLE", "UNAVAILABLE", null))
         .andReturn(responsePb);
     EasyMock.replay(rpcFactoryMock, rpcMock);
     DatastoreOptions options = this.options.toBuilder()
-        .retryParams(RetryParams.getDefaultInstance())
+        .retryParams(RetryParams.defaultInstance())
         .serviceRpcFactory(rpcFactoryMock)
         .build();
-    Datastore datastore = DatastoreFactory.instance().get(options);
+    Datastore datastore = options.service();
     Entity entity = datastore.get(KEY1);
     assertEquals(ENTITY1, entity);
     EasyMock.verify(rpcFactoryMock, rpcMock);
@@ -800,7 +1008,8 @@ public void testNonRetryableException() throws Exception {
     EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(DatastoreOptions.class)))
         .andReturn(rpcMock);
     EasyMock.expect(rpcMock.lookup(requestPb))
-        .andThrow(new DatastoreRpc.DatastoreRpcException(Reason.PERMISSION_DENIED))
+        .andThrow(
+            new DatastoreException(DatastoreException.UNKNOWN_CODE, "denied", "PERMISSION_DENIED"))
         .times(1);
     EasyMock.replay(rpcFactoryMock, rpcMock);
     RetryParams retryParams = RetryParams.builder().retryMinAttempts(2).build();
@@ -808,9 +1017,9 @@ public void testNonRetryableException() throws Exception {
         .retryParams(retryParams)
         .serviceRpcFactory(rpcFactoryMock)
         .build();
-    Datastore datastore = DatastoreFactory.instance().get(options);
+    Datastore datastore = options.service();
     thrown.expect(DatastoreException.class);
-    thrown.expectMessage(Reason.PERMISSION_DENIED.description());
+    thrown.expectMessage("denied");
     datastore.get(KEY1);
     EasyMock.verify(rpcFactoryMock, rpcMock);
   }
@@ -828,10 +1037,10 @@ public void testRuntimeException() throws Exception {
         .andThrow(new RuntimeException(exceptionMessage));
     EasyMock.replay(rpcFactoryMock, rpcMock);
     DatastoreOptions options = this.options.toBuilder()
-        .retryParams(RetryParams.getDefaultInstance())
+        .retryParams(RetryParams.defaultInstance())
         .serviceRpcFactory(rpcFactoryMock)
         .build();
-    Datastore datastore = DatastoreFactory.instance().get(options);
+    Datastore datastore = options.service();
     thrown.expect(DatastoreException.class);
     thrown.expectMessage(exceptionMessage);
     datastore.get(KEY1);
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ListValueTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ListValueTest.java
index 74b74facfe45..6245c715d476 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ListValueTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ListValueTest.java
@@ -28,7 +28,8 @@
 
 public class ListValueTest {
 
-  private static final List<? extends Value<?>> CONTENT = ImmutableList.of(NullValue.of(), StringValue.of("foo"));
+  private static final List<? extends Value<?>> CONTENT =
+      ImmutableList.of(NullValue.of(), StringValue.of("foo"));
 
   @Test
   public void testToBuilder() throws Exception {
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java
new file mode 100644
index 000000000000..40ea62c5a7e0
--- /dev/null
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/LocalGcdHelperTest.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.datastore;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.gcloud.datastore.testing.LocalGcdHelper;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+
+@RunWith(JUnit4.class)
+public class LocalGcdHelperTest {
+
+  private static final String PROJECT_ID = LocalGcdHelper.DEFAULT_PROJECT_ID;
+  private static final int PORT = LocalGcdHelper.findAvailablePort(LocalGcdHelper.DEFAULT_PORT);
+
+  @Test
+  public void testFindAvailablePort() {
+    int chosenPort = LocalGcdHelper.findAvailablePort(LocalGcdHelper.DEFAULT_PORT);
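+    // Binding a ServerSocket to the chosen port proves that it was actually free.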
+    try (ServerSocket tempSocket = new ServerSocket(chosenPort)) {
+      // success
+    } catch (IOException e) {
+      if (chosenPort != LocalGcdHelper.DEFAULT_PORT) {
+        fail("Chosen port not free, even though LocalGcdHelper claimed it was.");
+      }
+    }
+  }
+
+  @Test
+  public void testSendQuitRequest() throws IOException, InterruptedException {
+    LocalGcdHelper gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT);
+    assertTrue(LocalGcdHelper.sendQuitRequest(PORT));
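+    // Poll isActive until the emulator shuts down, giving up after the timeout below.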
+    long timeoutMillis = 30000;
+    long startTime = System.currentTimeMillis();
+    boolean datastoreActive = LocalGcdHelper.isActive(PROJECT_ID, PORT);
+    while (datastoreActive && System.currentTimeMillis() - startTime < timeoutMillis) {
+      datastoreActive = LocalGcdHelper.isActive(PROJECT_ID, PORT);
+    }
+    assertFalse(datastoreActive);
+    assertFalse(LocalGcdHelper.sendQuitRequest(PORT));
+    gcdHelper.stop();
+  }
+
+  @Test
+  public void testStartStop() throws IOException, InterruptedException {
+    LocalGcdHelper gcdHelper = LocalGcdHelper.start(PROJECT_ID, PORT);
+    assertFalse(LocalGcdHelper.isActive("wrong-project-id", PORT));
+    assertTrue(LocalGcdHelper.isActive(PROJECT_ID, PORT));
+    gcdHelper.stop();
+    assertFalse(LocalGcdHelper.isActive(PROJECT_ID, PORT));
+  }
+}
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java
index a8cbcb294b39..51650f2062d8 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/SerializationTest.java
@@ -47,8 +47,8 @@ public class SerializationTest {
   private static final DateTime DATE_TIME1 = DateTime.now();
   private static final LatLng LAT_LNG = new LatLng(37.422035, -122.084124);
   private static final Blob BLOB1 = Blob.copyFrom(UTF_8.encode("hello world"));
-  private static final Cursor CURSOR1 = Cursor.copyFrom(new byte[] {1,2});
-  private static final Cursor CURSOR2 = Cursor.copyFrom(new byte[]{10});
+  private static final Cursor CURSOR1 = Cursor.copyFrom(new byte[] {1, 2});
+  private static final Cursor CURSOR2 = Cursor.copyFrom(new byte[] {10});
   private static final Query<?> GQL1 =
       Query.gqlQueryBuilder("select * from kind1 where name = @name and age > @1")
       .setBinding("name", "name1")
@@ -145,8 +145,8 @@ public void testServiceOptions() throws Exception {
 
     options = options.toBuilder()
         .namespace("ns1")
-        .retryParams(RetryParams.getDefaultInstance())
-        .authCredentials(AuthCredentials.noCredentials())
+        .retryParams(RetryParams.defaultInstance())
+        .authCredentials(null)
         .build();
     serializedCopy = serializeAndDeserialize(options);
     assertEquals(options, serializedCopy);
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java
new file mode 100644
index 000000000000..ae6a4ca92787
--- /dev/null
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/StructuredQueryTest.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.datastore;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableList;
+import com.google.gcloud.datastore.Query.ResultType;
+import com.google.gcloud.datastore.StructuredQuery.CompositeFilter;
+import com.google.gcloud.datastore.StructuredQuery.Filter;
+import com.google.gcloud.datastore.StructuredQuery.OrderBy;
+import com.google.gcloud.datastore.StructuredQuery.PropertyFilter;
+
+import org.junit.Test;
+
+import java.util.List;
+
+public class StructuredQueryTest {
+
+  private static final String NAMESPACE = "ns";
+  private static final String KIND = "k";
+  private static final Cursor START_CURSOR = Cursor.copyFrom(new byte[] {1, 2});
+  private static final Cursor END_CURSOR = Cursor.copyFrom(new byte[] {10});
+  private static final int OFFSET = 42;
+  private static final Integer LIMIT = 43;
+  private static final Filter FILTER =
+      CompositeFilter.and(PropertyFilter.gt("p1", 10), PropertyFilter.eq("a", "v"));
+  private static final OrderBy ORDER_BY_1 = OrderBy.asc("p2");
+  private static final OrderBy ORDER_BY_2 = OrderBy.desc("p3");
+  private static final List<OrderBy> ORDER_BY = ImmutableList.of(ORDER_BY_1, ORDER_BY_2);
+  private static final String PROJECTION1 = "p4";
+  private static final String PROJECTION2 = "p5";
+  private static final List<String> PROJECTION = ImmutableList.of(PROJECTION1, PROJECTION2);
+  private static final String DISTINCT_ON1 = "p6";
+  private static final String DISTINCT_ON2 = "p7";
+  private static final List<String> DISTINCT_ON = ImmutableList.of(DISTINCT_ON1, DISTINCT_ON2);
+  private static final EntityQuery ENTITY_QUERY = Query.entityQueryBuilder()
+      .namespace(NAMESPACE)
+      .kind(KIND)
+      .startCursor(START_CURSOR)
+      .endCursor(END_CURSOR)
+      .offset(OFFSET)
+      .limit(LIMIT)
+      .filter(FILTER)
+      .orderBy(ORDER_BY_1, ORDER_BY_2)
+      .build();
+  private static final KeyQuery KEY_QUERY = Query.keyQueryBuilder()
+      .namespace(NAMESPACE)
+      .kind(KIND)
+      .startCursor(START_CURSOR)
+      .endCursor(END_CURSOR)
+      .offset(OFFSET)
+      .limit(LIMIT)
+      .filter(FILTER)
+      .orderBy(ORDER_BY_1, ORDER_BY_2)
+      .build();
+  private static final ProjectionEntityQuery PROJECTION_QUERY =
+      Query.projectionEntityQueryBuilder()
+          .namespace(NAMESPACE)
+          .kind(KIND)
+          .startCursor(START_CURSOR)
+          .endCursor(END_CURSOR)
+          .offset(OFFSET)
+          .limit(LIMIT)
+          .filter(FILTER)
+          .orderBy(ORDER_BY_1, ORDER_BY_2)
+          .projection(PROJECTION1, PROJECTION2)
+          .distinctOn(DISTINCT_ON1, DISTINCT_ON2)
+          .build();
+
+  @Test
+  public void testEntityQueryBuilder() {
+    compareBaseBuilderFields(ENTITY_QUERY);
+    assertTrue(ENTITY_QUERY.projection().isEmpty());
+    assertTrue(ENTITY_QUERY.distinctOn().isEmpty());
+  }
+
+  @Test
+  public void testKeyQueryBuilder() {
+    compareBaseBuilderFields(KEY_QUERY);
+    assertEquals(ImmutableList.of(StructuredQuery.KEY_PROPERTY_NAME), KEY_QUERY.projection());
+    assertTrue(KEY_QUERY.distinctOn().isEmpty());
+  }
+
+  @Test
+  public void testProjectionEntityQueryBuilder() {
+    compareBaseBuilderFields(PROJECTION_QUERY);
+    assertEquals(PROJECTION, PROJECTION_QUERY.projection());
+    assertEquals(DISTINCT_ON, PROJECTION_QUERY.distinctOn());
+  }
+
+  private void compareBaseBuilderFields(StructuredQuery<?> query) {
+    assertEquals(NAMESPACE, query.namespace());
+    assertEquals(KIND, query.kind());
+    assertEquals(START_CURSOR, query.startCursor());
+    assertEquals(END_CURSOR, query.endCursor());
+    assertEquals(OFFSET, query.offset());
+    assertEquals(LIMIT, query.limit());
+    assertEquals(FILTER, query.filter());
+    assertEquals(ORDER_BY, query.orderBy());
+  }
+
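+  // Namespace is intentionally not compared after mergeFrom(): it is not part of the query
+  // protobuf, so merging cannot restore it (see compareMergedQuery).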
+  @Test
+  public void mergeFrom() {
+    compareMergedQuery(
+        ENTITY_QUERY, new EntityQuery.Builder().mergeFrom(ENTITY_QUERY.toPb()).build());
+    compareMergedQuery(KEY_QUERY, new KeyQuery.Builder().mergeFrom(KEY_QUERY.toPb()).build());
+    compareMergedQuery(
+        PROJECTION_QUERY,
+        new ProjectionEntityQuery.Builder().mergeFrom(PROJECTION_QUERY.toPb()).build());
+  }
+
+  private void compareMergedQuery(StructuredQuery<?> expected, StructuredQuery<?> actual) {
+    assertEquals(expected.kind(), actual.kind());
+    assertEquals(expected.startCursor(), actual.startCursor());
+    assertEquals(expected.endCursor(), actual.endCursor());
+    assertEquals(expected.offset(), actual.offset());
+    assertEquals(expected.limit(), actual.limit());
+    assertEquals(expected.filter(), actual.filter());
+    assertEquals(expected.orderBy(), actual.orderBy());
+    assertEquals(expected.projection(), actual.projection());
+    assertEquals(expected.distinctOn(), actual.distinctOn());
+  }
+
+  @Test
+  public void testToAndFromPb() {
+    assertEquals(
+        ENTITY_QUERY,
+        StructuredQuery.fromPb(ResultType.ENTITY, ENTITY_QUERY.namespace(), ENTITY_QUERY.toPb()));
+    assertEquals(
+        KEY_QUERY, StructuredQuery.fromPb(ResultType.KEY, KEY_QUERY.namespace(), KEY_QUERY.toPb()));
+    assertEquals(
+        PROJECTION_QUERY,
+        StructuredQuery.fromPb(
+            ResultType.PROJECTION_ENTITY, PROJECTION_QUERY.namespace(), PROJECTION_QUERY.toPb()));
+  }
+
+  @Test
+  public void testToBuilder() {
+    List<StructuredQuery<?>> queries =
+        ImmutableList.<StructuredQuery<?>>of(ENTITY_QUERY, KEY_QUERY, PROJECTION_QUERY);
+    for (StructuredQuery<?> query : queries) {
+      assertEquals(query, query.toBuilder().build());
+    }
+  }
+
+  @Test
+  public void testKeyOnly() {
+    assertTrue(KEY_QUERY.keyOnly());
+    assertFalse(ENTITY_QUERY.keyOnly());
+    assertFalse(PROJECTION_QUERY.keyOnly());
+  }
+}
diff --git a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java
index e433ca9a0666..fb718dffe1e5 100644
--- a/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java
+++ b/gcloud-java-datastore/src/test/java/com/google/gcloud/datastore/ValueTest.java
@@ -62,11 +62,13 @@ public class ValueTest {
 
   private ImmutableMap<ValueType, Value<?>> typeToValue;
 
+  @SuppressWarnings("rawtypes")
  private class TestBuilder extends Value.BaseBuilder<Set<String>, Value<Set<String>>, TestBuilder> {
     TestBuilder() {
       super(ValueType.LIST);
     }
 
+    @SuppressWarnings({"unchecked"})
     @Override
     public Value build() {
       return new Value(this) {
@@ -123,7 +125,6 @@ public void testExcludeFromIndexes() throws Exception {
     for (Map.Entry<ValueType, Value<?>> entry : typeToValue.entrySet()) {
       assertFalse(entry.getValue().excludeFromIndexes());
     }
-
     TestBuilder builder = new TestBuilder();
     assertFalse(builder.build().excludeFromIndexes());
     assertTrue(builder.excludeFromIndexes(true).build().excludeFromIndexes());
@@ -154,6 +155,7 @@ public void testGet() throws Exception {
   @Test
   public void testToBuilder() throws Exception {
    Set<String> content = Collections.singleton("bla");
+    @SuppressWarnings("rawtypes")
     ValueBuilder builder = new TestBuilder();
     builder.meaning(1).set(content).excludeFromIndexes(true);
     Value value = builder.build();
diff --git a/gcloud-java-examples/README.md b/gcloud-java-examples/README.md
index 366acd5de929..8030d14d09e7 100644
--- a/gcloud-java-examples/README.md
+++ b/gcloud-java-examples/README.md
@@ -12,43 +12,86 @@ Examples for gcloud-java (Java idiomatic client for [Google Cloud Platform][clou
 
 Quickstart
 ----------
-Add this to your pom.xml file
+If you are using Maven, add this to your pom.xml file
```xml
<dependency>
  <groupId>com.google.gcloud</groupId>
  <artifactId>gcloud-java-examples</artifactId>
-  <version>0.0.10</version>
+  <version>0.1.3</version>
</dependency>
```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.gcloud:gcloud-java-examples:0.1.3'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.gcloud" % "gcloud-java-examples" % "0.1.3"
+```
 
 To run examples from your command line:
 
 1. Login using gcloud SDK (`gcloud auth login` in command line)
 
-2. Set your current project using `gcloud config set project PROJECT_ID`
+2. Set your current project using `gcloud config set project PROJECT_ID`. This step is not necessary for `ResourceManagerExample`.
 
 3. Compile using Maven (`mvn compile` in command line from your base project directory)
 
 4. Run an example using Maven from command line.
 
-  Here's an example run of `DatastoreExample`.
+  * Here's an example run of `BigQueryExample`.
+
+    Before running the example, go to the [Google Developers Console][developers-console] to ensure
+    that the BigQuery API is enabled. You can upload a CSV file `my_csv_file` to the `my_bucket` bucket
+    (replace `my_csv_file` and `my_bucket` with actual file and bucket names) using the GCS
+    [web browser](https://console.developers.google.com/storage/browser). The CSV file will be used to
+    load data into a BigQuery table and should look something like:
+    ```csv
+    value1
+    value2
+    value3
+    ```
+    Then you are ready to run the following example:
+    ```
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.BigQueryExample" -Dexec.args="create dataset new_dataset_id"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.BigQueryExample" -Dexec.args="create table new_dataset_id new_table_id field_name:string"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.BigQueryExample" -Dexec.args="list tables new_dataset_id"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.BigQueryExample" -Dexec.args="load new_dataset_id new_table_id CSV gs://my_bucket/my_csv_file"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.BigQueryExample" -Dexec.args="query 'select * from new_dataset_id.new_table_id'"
+    ```
+
+  * Here's an example run of `DatastoreExample`.
   
-  Note that you have to enable the Google Cloud Datastore API on the [Google Developers Console][developers-console] before running the following commands.
-  ```
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="my_name add my\ comment"
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="my_name display"
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="my_name delete"
-  ```
-
-  Here's an example run of `StorageExample`.
-
-  Before running the example, go to the [Google Developers Console][developers-console] to ensure that Google Cloud Storage API is enabled and that you have a bucket.  Also ensure that you have a test file (`test.txt` is chosen here) to upload to Cloud Storage stored locally on your machine.
-  ```
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="upload /path/to/test.txt <bucket>"
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="list <bucket>"
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="download <bucket> test.txt"
-  $mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="delete <bucket> test.txt"
-```
+    Be sure to replace the placeholder project ID "your-project-id" with your own project ID. Also note that you have to enable the Google Cloud Datastore API on the [Google Developers Console][developers-console] before running the following commands.
+    ```
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="your-project-id my_name add my\ comment"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="your-project-id my_name display"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.DatastoreExample" -Dexec.args="your-project-id my_name delete"
+    ```
+
+  * Here's an example run of `ResourceManagerExample`.
+
+    Be sure to replace the placeholder project ID "your-project-id" with your own globally unique project ID.
+    ```
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.ResourceManagerExample" -Dexec.args="create your-project-id"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.ResourceManagerExample" -Dexec.args="list"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.ResourceManagerExample" -Dexec.args="get your-project-id"
+    ```
+
+  * Here's an example run of `StorageExample`.
+
+    Before running the example, go to the [Google Developers Console][developers-console] to ensure that the Google Cloud Storage API is enabled and that you have a bucket. Also ensure that you have a local test file to upload to Cloud Storage (`test.txt` is used here).
+    ```
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="upload /path/to/test.txt <bucket>"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="list <bucket>"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="download <bucket> test.txt"
+    mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample" -Dexec.args="delete <bucket> test.txt"
+    ```
+
+Troubleshooting
+---------------
+
+To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
 
 Java Versions
 -------------
@@ -69,7 +112,9 @@ Contributing
 
 Contributions to this library are always welcome and highly encouraged.
 
-See [CONTRIBUTING] for more information on how to get started.
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
+
+Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.
 
 License
 -------
@@ -78,6 +123,7 @@ Apache 2.0 - See [LICENSE] for more information.
 
 
 [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md
+[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct
 [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE
 [cloud-platform]: https://cloud.google.com/
 [developers-console]:https://console.developers.google.com/
diff --git a/gcloud-java-examples/pom.xml b/gcloud-java-examples/pom.xml
index d6a990ed8b7c..5597f1f44132 100644
--- a/gcloud-java-examples/pom.xml
+++ b/gcloud-java-examples/pom.xml
@@ -11,8 +11,11 @@
  <parent>
    <groupId>com.google.gcloud</groupId>
    <artifactId>gcloud-java-pom</artifactId>
-    <version>0.0.11-SNAPSHOT</version>
+    <version>0.1.4-SNAPSHOT</version>
  </parent>
+  <properties>
+    <site.installationModule>gcloud-java-examples</site.installationModule>
+  </properties>
  <dependencies>
    <dependency>
      <groupId>${project.groupId}</groupId>
diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
new file mode 100644
index 000000000000..8fe78cbd50ad
--- /dev/null
+++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/BigQueryExample.java
@@ -0,0 +1,792 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *       http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.examples;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gcloud.WriteChannel;
+import com.google.gcloud.bigquery.BaseTableInfo;
+import com.google.gcloud.bigquery.BigQuery;
+import com.google.gcloud.bigquery.BigQueryError;
+import com.google.gcloud.bigquery.BigQueryOptions;
+import com.google.gcloud.bigquery.CopyJobConfiguration;
+import com.google.gcloud.bigquery.DatasetId;
+import com.google.gcloud.bigquery.DatasetInfo;
+import com.google.gcloud.bigquery.ExternalDataConfiguration;
+import com.google.gcloud.bigquery.ExternalTableInfo;
+import com.google.gcloud.bigquery.ExtractJobConfiguration;
+import com.google.gcloud.bigquery.Field;
+import com.google.gcloud.bigquery.FieldValue;
+import com.google.gcloud.bigquery.FormatOptions;
+import com.google.gcloud.bigquery.JobId;
+import com.google.gcloud.bigquery.JobInfo;
+import com.google.gcloud.bigquery.JobStatus;
+import com.google.gcloud.bigquery.LoadJobConfiguration;
+import com.google.gcloud.bigquery.QueryRequest;
+import com.google.gcloud.bigquery.QueryResponse;
+import com.google.gcloud.bigquery.Schema;
+import com.google.gcloud.bigquery.TableId;
+import com.google.gcloud.bigquery.TableInfo;
+import com.google.gcloud.bigquery.ViewInfo;
+import com.google.gcloud.bigquery.WriteChannelConfiguration;
+import com.google.gcloud.spi.BigQueryRpc.Tuple;
+
+import java.nio.channels.FileChannel;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * An example of using Google BigQuery.
+ *
+ * 

This example demonstrates a simple/typical BigQuery usage. + * + *

Steps needed for running the example: + *

    + *
  1. login using gcloud SDK - {@code gcloud auth login}.
  2. + *
  3. compile using maven - {@code mvn compile}
  4. + *
  5. run using maven - + *
    {@code mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.BigQueryExample"
    + *  -Dexec.args="[]
    + *  list datasets |
    + *  list tables  |
    + *  list jobs |
    + *  list data  
| + * info dataset | + * info table
| + * info job | + * create dataset | + * create table
(:)+ | + * create view
| + * create external-table
(:)+ | + * delete dataset | + * delete table
| + * cancel | + * copy | + * load
+ | + * extract
+ | + * query | + * load-file
"} + * + * + * + *

The first parameter is an optional {@code project_id} (logged-in project will be used if not + * supplied). Second parameter is a BigQuery operation and can be used to demonstrate its usage. For + * operations that apply to more than one entity (`list`, `create`, `info` and `delete`) the third + * parameter specifies the entity. {@code } indicates that only primitive types are + * supported by the {@code create table} and {@code create external-table} operations + * ({@code string}, {@code float}, {@code integer}, {@code timestamp}, {@code boolean}). + * {@code }, {@code } and {@code } parameters are URIs to + * Google Cloud Storage blobs, in the form {@code gs://bucket/path}. See each action's run method + * for the specific BigQuery interaction. + */ +public class BigQueryExample { + + private static final int CHUNK_SIZE = 8 * 256 * 1024; + private static final Map CREATE_ACTIONS = new HashMap<>(); + private static final Map INFO_ACTIONS = new HashMap<>(); + private static final Map LIST_ACTIONS = new HashMap<>(); + private static final Map DELETE_ACTIONS = new HashMap<>(); + private static final Map ACTIONS = new HashMap<>(); + + private abstract static class BigQueryAction { + + abstract void run(BigQuery bigquery, T request) throws Exception; + + abstract T parse(String... args) throws Exception; + + protected String params() { + return ""; + } + } + + private static class ParentAction extends BigQueryAction> { + + private final Map subActions; + + public ParentAction(Map subActions) { + this.subActions = ImmutableMap.copyOf(subActions); + } + + @Override + @SuppressWarnings("unchecked") + void run(BigQuery bigquery, Tuple subaction) throws Exception { + subaction.x().run(bigquery, subaction.y()); + } + + @Override + Tuple parse(String... args) throws Exception { + if (args.length >= 1) { + BigQueryAction action = subActions.get(args[0]); + if (action != null) { + Object actionArguments = action.parse(Arrays.copyOfRange(args, 1, args.length)); + return Tuple.of(action, actionArguments); + } else { + throw new IllegalArgumentException("Unrecognized entity '" + args[0] + "'."); + } + } + throw new IllegalArgumentException("Missing required entity."); + } + + @Override + public String params() { + StringBuilder builder = new StringBuilder(); + for (Map.Entry entry : subActions.entrySet()) { + builder.append('\n').append(entry.getKey()); + String param = entry.getValue().params(); + if (param != null && !param.isEmpty()) { + builder.append(' ').append(param); + } + } + return builder.toString(); + } + } + + private abstract static class NoArgsAction extends BigQueryAction { + @Override + Void parse(String... args) throws Exception { + if (args.length == 0) { + return null; + } + throw new IllegalArgumentException("This action takes no arguments."); + } + } + + /** + * This class demonstrates how to list BigQuery Datasets. + * + * @see Datasets: list + * + */ + private static class ListDatasetsAction extends NoArgsAction { + @Override + public void run(BigQuery bigquery, Void arg) { + Iterator datasetInfoIterator = bigquery.listDatasets().iterateAll(); + while (datasetInfoIterator.hasNext()) { + System.out.println(datasetInfoIterator.next()); + } + } + } + + private abstract static class DatasetAction extends BigQueryAction { + @Override + DatasetId parse(String... 
args) throws Exception { + String message; + if (args.length == 1) { + return DatasetId.of(args[0]); + } else if (args.length > 1) { + message = "Too many arguments."; + } else { + message = "Missing required dataset id."; + } + throw new IllegalArgumentException(message); + } + + @Override + public String params() { + return ""; + } + } + + /** + * This class demonstrates how to list BigQuery Tables in a Dataset. + * + * @see Tables: list + */ + private static class ListTablesAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + Iterator tableInfoIterator = bigquery.listTables(datasetId).iterateAll(); + while (tableInfoIterator.hasNext()) { + System.out.println(tableInfoIterator.next()); + } + } + } + + /** + * This class demonstrates how to retrieve information on a BigQuery Dataset. + * + * @see Datasets: get + * + */ + private static class DatasetInfoAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + System.out.println("Dataset info: " + bigquery.getDataset(datasetId)); + } + } + + /** + * This class demonstrates how to create a BigQuery Dataset. + * + * @see Datasets: + * insert + */ + private static class CreateDatasetAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + bigquery.create(DatasetInfo.builder(datasetId).build()); + System.out.println("Created dataset " + datasetId); + } + } + + /** + * This class demonstrates how to delete a BigQuery Dataset. + * + * @see Datasets: + * delete + */ + private static class DeleteDatasetAction extends DatasetAction { + @Override + public void run(BigQuery bigquery, DatasetId datasetId) { + if (bigquery.delete(datasetId)) { + System.out.println("Dataset " + datasetId + " was deleted"); + } else { + System.out.println("Dataset " + datasetId + " not found"); + } + } + } + + private abstract static class TableAction extends BigQueryAction { + @Override + TableId parse(String... args) throws Exception { + String message; + if (args.length == 2) { + return TableId.of(args[0], args[1]); + } else if (args.length < 2) { + message = "Missing required dataset and table id."; + } else { + message = "Too many arguments."; + } + throw new IllegalArgumentException(message); + } + + @Override + public String params() { + return "

"; + } + } + + /** + * This class demonstrates how to retrieve information on a BigQuery Table. + * + * @see Tables: get + */ + private static class TableInfoAction extends TableAction { + @Override + public void run(BigQuery bigquery, TableId tableId) { + System.out.println("Table info: " + bigquery.getTable(tableId)); + } + } + + /** + * This class demonstrates how to delete a BigQuery Table. + * + * @see Tables: delete + * + */ + private static class DeleteTableAction extends TableAction { + @Override + public void run(BigQuery bigquery, TableId tableId) { + if (bigquery.delete(tableId)) { + System.out.println("Table " + tableId + " was deleted"); + } else { + System.out.println("Table " + tableId + " not found"); + } + } + } + + /** + * This class demonstrates how to list the rows in a BigQuery Table. + * + * @see Tabledata: + * list + */ + private static class ListTableDataAction extends TableAction { + @Override + public void run(BigQuery bigquery, TableId tableId) { + Iterator> iterator = bigquery.listTableData(tableId).iterateAll(); + while (iterator.hasNext()) { + System.out.println(iterator.next()); + } + } + } + + private abstract static class JobAction extends BigQueryAction { + @Override + JobId parse(String... args) throws Exception { + String message; + if (args.length == 1) { + return JobId.of(args[0]); + } else if (args.length > 1) { + message = "Too many arguments."; + } else { + message = "Missing required query."; + } + throw new IllegalArgumentException(message); + } + + @Override + public String params() { + return ""; + } + } + + /** + * This class demonstrates how to list BigQuery Jobs. + * + * @see Jobs: list + */ + private static class ListJobsAction extends NoArgsAction { + @Override + public void run(BigQuery bigquery, Void arg) { + Iterator datasetInfoIterator = bigquery.listJobs().iterateAll(); + while (datasetInfoIterator.hasNext()) { + System.out.println(datasetInfoIterator.next()); + } + } + } + + /** + * This class demonstrates how to retrieve information on a BigQuery Job. + * + * @see Jobs: get + */ + private static class JobInfoAction extends JobAction { + @Override + public void run(BigQuery bigquery, JobId jobId) { + System.out.println("Job info: " + bigquery.getJob(jobId)); + } + } + + /** + * This class demonstrates how to cancel a BigQuery Job. 
+ * + * @see Jobs: cancel + */ + private static class CancelJobAction extends JobAction { + @Override + public void run(BigQuery bigquery, JobId jobId) { + if (bigquery.cancel(jobId)) { + System.out.println("Requested cancel for job " + jobId); + } else { + System.out.println("Job " + jobId + " not found"); + } + } + } + + private abstract static class CreateTableAction extends BigQueryAction { + @Override + void run(BigQuery bigquery, BaseTableInfo table) throws Exception { + BaseTableInfo createTable = bigquery.create(table); + System.out.println("Created table:"); + System.out.println(createTable.toString()); + } + + static Schema parseSchema(String[] args, int start, int end) { + Schema.Builder builder = Schema.builder(); + for (int i = start; i < end; i++) { + String[] fieldsArray = args[i].split(":"); + if (fieldsArray.length != 2) { + throw new IllegalArgumentException("Unrecognized field definition '" + args[i] + "'."); + } + String fieldName = fieldsArray[0]; + String typeString = fieldsArray[1].toLowerCase(); + Field.Type fieldType; + switch (typeString) { + case "string": + fieldType = Field.Type.string(); + break; + case "integer": + fieldType = Field.Type.integer(); + break; + case "timestamp": + fieldType = Field.Type.timestamp(); + break; + case "float": + fieldType = Field.Type.floatingPoint(); + break; + case "boolean": + fieldType = Field.Type.bool(); + break; + default: + throw new IllegalArgumentException("Unrecognized field type '" + typeString + "'."); + } + builder.addField(Field.of(fieldName, fieldType)); + } + return builder.build(); + } + } + + /** + * This class demonstrates how to create a simple BigQuery Table (i.e. a table of type + * {@link BaseTableInfo.Type#TABLE}). + * + * @see Tables: insert + * + */ + private static class CreateSimpleTableAction extends CreateTableAction { + @Override + BaseTableInfo parse(String... args) throws Exception { + if (args.length >= 3) { + String dataset = args[0]; + String table = args[1]; + TableId tableId = TableId.of(dataset, table); + return TableInfo.of(tableId, parseSchema(args, 2, args.length)); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
(:)+"; + } + } + + /** + * This class demonstrates how to create a BigQuery External Table (i.e. a table of type + * {@link BaseTableInfo.Type#EXTERNAL}). + * + * @see Tables: insert + * + */ + private static class CreateExternalTableAction extends CreateTableAction { + @Override + BaseTableInfo parse(String... args) throws Exception { + if (args.length >= 5) { + String dataset = args[0]; + String table = args[1]; + TableId tableId = TableId.of(dataset, table); + ExternalDataConfiguration configuration = + ExternalDataConfiguration.of(args[args.length - 1], + parseSchema(args, 3, args.length - 1), FormatOptions.of(args[2])); + return ExternalTableInfo.of(tableId, configuration); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
(:)+ "; + } + } + + /** + * This class demonstrates how to create a BigQuery View Table (i.e. a table of type + * {@link BaseTableInfo.Type#VIEW}). + * + * @see Tables: insert + * + */ + private static class CreateViewAction extends CreateTableAction { + @Override + BaseTableInfo parse(String... args) throws Exception { + String message; + if (args.length == 3) { + String dataset = args[0]; + String table = args[1]; + String query = args[2]; + TableId tableId = TableId.of(dataset, table); + return ViewInfo.of(tableId, query); + } else if (args.length < 3) { + message = "Missing required dataset id, table id or query."; + } else { + message = "Too many arguments."; + } + throw new IllegalArgumentException(message); + } + + @Override + protected String params() { + return "
"; + } + } + + private abstract static class JobRunAction extends BigQueryAction { + @Override + void run(BigQuery bigquery, JobInfo job) throws Exception { + System.out.println("Creating job"); + JobInfo startedJob = bigquery.create(job); + while (startedJob.status().state() != JobStatus.State.DONE) { + System.out.println("Waiting for job " + startedJob.jobId().job() + " to complete"); + Thread.sleep(1000L); + startedJob = bigquery.getJob(startedJob.jobId()); + } + if (startedJob.status().error() == null) { + System.out.println("Job " + startedJob.jobId().job() + " succeeded"); + } else { + System.out.println("Job " + startedJob.jobId().job() + " failed"); + System.out.println("Error: " + startedJob.status().error()); + } + } + } + + /** + * This class demonstrates how to create a BigQuery Load Job and wait for it to complete. + * + * @see Jobs: insert + */ + private static class LoadAction extends JobRunAction { + @Override + JobInfo parse(String... args) throws Exception { + if (args.length >= 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + LoadJobConfiguration configuration = LoadJobConfiguration.of( + tableId, Arrays.asList(args).subList(3, args.length), FormatOptions.of(format)); + return JobInfo.of(configuration); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
+"; + } + } + + /** + * This class demonstrates how to create a BigQuery Extract Job and wait for it to complete. + * + * @see Jobs: insert + */ + private static class ExtractAction extends JobRunAction { + @Override + JobInfo parse(String... args) throws Exception { + if (args.length >= 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + ExtractJobConfiguration configuration = ExtractJobConfiguration.of( + tableId, Arrays.asList(args).subList(3, args.length), format); + return JobInfo.of(configuration); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
+"; + } + } + + /** + * This class demonstrates how to create a BigQuery Copy Job and wait for it to complete. + * + * @see Jobs: insert + */ + private static class CopyAction extends JobRunAction { + @Override + JobInfo parse(String... args) throws Exception { + String message; + if (args.length == 4) { + TableId sourceTableId = TableId.of(args[0], args[1]); + TableId destinationTableId = TableId.of(args[2], args[3]); + return JobInfo.of(CopyJobConfiguration.of(destinationTableId, sourceTableId)); + } else if (args.length < 3) { + message = "Missing required source or destination table."; + } else { + message = "Too many arguments."; + } + throw new IllegalArgumentException(message); + } + + @Override + protected String params() { + return " "; + } + } + + /** + * This class demonstrates how to run a BigQuery SQL Query and wait for associated job to + * complete. Results or errors are shown. + * + * @see Jobs: query + */ + private static class QueryAction extends BigQueryAction { + @Override + void run(BigQuery bigquery, QueryRequest queryRequest) throws Exception { + System.out.println("Running query"); + QueryResponse queryResponse = bigquery.query(queryRequest); + while (!queryResponse.jobCompleted()) { + System.out.println("Waiting for query job " + queryResponse.jobId() + " to complete"); + Thread.sleep(1000L); + queryResponse = bigquery.getQueryResults(queryResponse.jobId()); + } + if (!queryResponse.hasErrors()) { + System.out.println("Query succeeded. Results:"); + Iterator> iterator = queryResponse.result().iterateAll(); + while (iterator.hasNext()) { + System.out.println(iterator.next()); + } + } else { + System.out.println("Query completed with errors. Errors:"); + for (BigQueryError err : queryResponse.executionErrors()) { + System.out.println(err); + } + } + } + + @Override + QueryRequest parse(String... args) throws Exception { + String message; + if (args.length == 1) { + return QueryRequest.of(args[0]); + } else if (args.length > 1) { + message = "Too many arguments."; + } else { + message = "Missing required query."; + } + throw new IllegalArgumentException(message); + } + + @Override + protected String params() { + return ""; + } + } + + /** + * This class demonstrates how to load data into a BigQuery Table from a local file. + * + * @see Resumable + * Upload + */ + private static class LoadFileAction + extends BigQueryAction> { + @Override + void run(BigQuery bigquery, Tuple configuration) + throws Exception { + System.out.println("Running insert"); + try (FileChannel fileChannel = FileChannel.open(Paths.get(configuration.y()))) { + WriteChannel writeChannel = bigquery.writer(configuration.x()); + long position = 0; + long written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel); + while (written > 0) { + position += written; + written = fileChannel.transferTo(position, CHUNK_SIZE, writeChannel); + } + writeChannel.close(); + } + } + + @Override + Tuple parse(String... args) throws Exception { + if (args.length == 4) { + String dataset = args[0]; + String table = args[1]; + String format = args[2]; + TableId tableId = TableId.of(dataset, table); + WriteChannelConfiguration configuration = + WriteChannelConfiguration.of(tableId, FormatOptions.of(format)); + return Tuple.of(configuration, args[3]); + } + throw new IllegalArgumentException("Missing required arguments."); + } + + @Override + protected String params() { + return "
"; + } + } + + static { + CREATE_ACTIONS.put("dataset", new CreateDatasetAction()); + CREATE_ACTIONS.put("table", new CreateSimpleTableAction()); + CREATE_ACTIONS.put("view", new CreateViewAction()); + CREATE_ACTIONS.put("external-table", new CreateExternalTableAction()); + INFO_ACTIONS.put("dataset", new DatasetInfoAction()); + INFO_ACTIONS.put("table", new TableInfoAction()); + INFO_ACTIONS.put("job", new JobInfoAction()); + LIST_ACTIONS.put("datasets", new ListDatasetsAction()); + LIST_ACTIONS.put("tables", new ListTablesAction()); + LIST_ACTIONS.put("jobs", new ListJobsAction()); + LIST_ACTIONS.put("data", new ListTableDataAction()); + DELETE_ACTIONS.put("dataset", new DeleteDatasetAction()); + DELETE_ACTIONS.put("table", new DeleteTableAction()); + ACTIONS.put("create", new ParentAction(CREATE_ACTIONS)); + ACTIONS.put("info", new ParentAction(INFO_ACTIONS)); + ACTIONS.put("list", new ParentAction(LIST_ACTIONS)); + ACTIONS.put("delete", new ParentAction(DELETE_ACTIONS)); + ACTIONS.put("cancel", new CancelJobAction()); + ACTIONS.put("load", new LoadAction()); + ACTIONS.put("extract", new ExtractAction()); + ACTIONS.put("copy", new CopyAction()); + ACTIONS.put("query", new QueryAction()); + ACTIONS.put("load-file", new LoadFileAction()); + } + + private static void printUsage() { + StringBuilder actionAndParams = new StringBuilder(); + for (Map.Entry entry : ACTIONS.entrySet()) { + actionAndParams.append("\n\t").append(entry.getKey()); + + String param = entry.getValue().params(); + if (param != null && !param.isEmpty()) { + actionAndParams.append(' ').append(param.replace("\n", "\n\t\t")); + } + } + System.out.printf("Usage: %s [] operation [entity] *%s%n", + BigQueryExample.class.getSimpleName(), actionAndParams); + } + + @SuppressWarnings("unchecked") + public static void main(String... args) throws Exception { + if (args.length < 1) { + System.out.println("Missing required project id and action"); + printUsage(); + return; + } + BigQueryOptions.Builder optionsBuilder = BigQueryOptions.builder(); + BigQueryAction action; + String actionName; + if (args.length >= 2 && !ACTIONS.containsKey(args[0])) { + actionName = args[1]; + optionsBuilder.projectId(args[0]); + action = ACTIONS.get(args[1]); + args = Arrays.copyOfRange(args, 2, args.length); + } else { + actionName = args[0]; + action = ACTIONS.get(args[0]); + args = Arrays.copyOfRange(args, 1, args.length); + } + if (action == null) { + System.out.println("Unrecognized action."); + printUsage(); + return; + } + BigQuery bigquery = optionsBuilder.build().service(); + Object request; + try { + request = action.parse(args); + } catch (IllegalArgumentException ex) { + System.out.println("Invalid input for action '" + actionName + "'. 
" + ex.getMessage()); + System.out.println("Expected: " + action.params()); + return; + } catch (Exception ex) { + System.out.println("Failed to parse request."); + ex.printStackTrace(); + return; + } + action.run(bigquery, request); + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java index c707e6686707..1e65a018a1fb 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/DatastoreExample.java @@ -17,7 +17,6 @@ package com.google.gcloud.examples; import com.google.gcloud.datastore.Datastore; -import com.google.gcloud.datastore.DatastoreFactory; import com.google.gcloud.datastore.DatastoreOptions; import com.google.gcloud.datastore.DateTime; import com.google.gcloud.datastore.Entity; @@ -26,8 +25,8 @@ import com.google.gcloud.datastore.Key; import com.google.gcloud.datastore.KeyFactory; import com.google.gcloud.datastore.Query; -import com.google.gcloud.datastore.Query.ResultType; import com.google.gcloud.datastore.QueryResults; +import com.google.gcloud.datastore.StructuredQuery; import com.google.gcloud.datastore.StructuredQuery.PropertyFilter; import com.google.gcloud.datastore.Transaction; @@ -37,11 +36,11 @@ import java.util.TreeMap; /** - * An example of using the Google Cloud Datastore. - *

- * This example adds, display or clear comments for a given user. - *

- * Steps needed for running the example:

    + * An example of using Google Cloud Datastore. + * + *

    This example adds, display or clear comments for a given user. + * + *

    Steps needed for running the example:

      *
    1. login using gcloud SDK - {@code gcloud auth login}.
    2. *
    3. compile using maven - {@code mvn compile}
    4. *
    5. run using maven - {@code mvn exec:java @@ -59,6 +58,7 @@ public class DatastoreExample { private interface DatastoreAction { void run(Transaction tx, Key userKey, String... args); + String getRequiredParams(); } @@ -100,21 +100,31 @@ public void run(Transaction tx, Key userKey, String... args) { return; } System.out.printf("User '%s' has %d comment[s].%n", userKey.name(), user.getLong("count")); - // ORDER BY timestamp"; - String gql = "SELECT * FROM " + COMMENT_KIND + " WHERE __key__ HAS ANCESTOR @1"; - Query query = Query.gqlQueryBuilder(ResultType.ENTITY, gql) - .namespace(NAMESPACE) - .addBinding(userKey) - .build(); - QueryResults results = tx.run(query); - // We could have added "ORDER BY timestamp" to the query to avoid the sorting bellow - // but that would require adding an ancestor index for timestamp - // see: https://cloud.google.com/datastore/docs/tools/indexconfig + int limit = 200; Map sortedComments = new TreeMap<>(); - while (results.hasNext()) { - Entity result = results.next(); - sortedComments.put(result.getDateTime("timestamp"), result.getString("content")); + StructuredQuery query = + Query.entityQueryBuilder() + .namespace(NAMESPACE) + .kind(COMMENT_KIND) + .filter(PropertyFilter.hasAncestor(userKey)) + .limit(limit) + .build(); + while (true) { + QueryResults results = tx.run(query); + int resultCount = 0; + while (results.hasNext()) { + Entity result = results.next(); + sortedComments.put(result.getDateTime("timestamp"), result.getString("content")); + resultCount++; + } + if (resultCount < limit) { + break; + } + query = query.toBuilder().startCursor(results.cursorAfter()).build(); } + // We could have added "ORDER BY timestamp" to the query to avoid sorting, but that would + // require adding an ancestor index for timestamp. + // See: https://cloud.google.com/datastore/docs/tools/indexconfig for (Map.Entry entry : sortedComments.entrySet()) { System.out.printf("\t%s: %s%n", entry.getKey(), entry.getValue()); } @@ -183,7 +193,7 @@ public static void main(String... args) { .namespace(NAMESPACE) .build(); String name = args.length > 1 ? args[1] : System.getProperty("user.name"); - Datastore datastore = DatastoreFactory.instance().get(options); + Datastore datastore = options.service(); KeyFactory keyFactory = datastore.newKeyFactory().kind(USER_KIND); Key key = keyFactory.newKey(name); String actionName = args.length > 2 ? args[2].toLowerCase() : DEFAULT_ACTION; @@ -203,7 +213,7 @@ public static void main(String... args) { DatastoreExample.class.getSimpleName(), actionAndParams); return; } - args = args.length > 3 ? Arrays.copyOfRange(args, 3, args.length): new String []{}; + args = args.length > 3 ? Arrays.copyOfRange(args, 3, args.length) : new String []{}; Transaction tx = datastore.newTransaction(); try { action.run(tx, key, args); diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java new file mode 100644 index 000000000000..46ff82bfaf12 --- /dev/null +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java @@ -0,0 +1,224 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.examples; + +import com.google.common.base.Joiner; +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ProjectInfo; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.Scanner; + +/** + * An example of using Google Cloud Resource Manager. + * + *

This example creates, deletes, gets, and lists projects.

Steps needed for running the example:
1. login using gcloud SDK - {@code gcloud auth login}.
2. compile using maven - {@code mvn compile}
3. run using maven - {@code mvn exec:java
   -Dexec.mainClass="com.google.gcloud.examples.ResourceManagerExample"
   -Dexec.args="[list | [create | delete | get] projectId]"}

A sketch of what the {@code create} action executes is shown below.
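For orientation, here is a minimal sketch of what the example's {@code create} action effectively executes, using only types introduced in this PR; the project ID and label values are placeholders, not part of the change:

```java
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ProjectInfo;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

import java.util.HashMap;
import java.util.Map;

public class CreateProjectSketch {
  public static void main(String... args) {
    // Credentials are inferred from the environment (gcloud auth login).
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    // Placeholder project ID and label; change before running.
    Map<String, String> labels = new HashMap<>();
    labels.put("launch-status", "in-development");
    Project project = resourceManager.create(
        ProjectInfo.builder("my-unique-project-id").labels(labels).build());
    System.out.printf("Successfully created project '%s'.%n", project.projectId());
  }
}
```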
      + */ +public class ResourceManagerExample { + + private static final String DEFAULT_ACTION = "list"; + private static final Map ACTIONS = new HashMap<>(); + + private interface ResourceManagerAction { + void run(ResourceManager resourceManager, String... args); + + String[] getRequiredParams(); + + String[] getOptionalParams(); + } + + private static class CreateAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... args) { + String projectId = args[0]; + Map labels = new HashMap<>(); + for (int i = 1; i < args.length; i += 2) { + if (i + 1 < args.length) { + labels.put(args[i], args[i + 1]); + } else { + labels.put(args[i], ""); + } + } + Project project = + resourceManager.create(ProjectInfo.builder(projectId).labels(labels).build()); + System.out.printf( + "Successfully created project '%s': %s.%n", projectId, projectDetails(project)); + } + + @Override + public String[] getRequiredParams() { + return new String[] {"project-id"}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {"label-key-1", "label-value-1", "label-key-2", "label-value-2", "..."}; + } + } + + private static class DeleteAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... args) { + String projectId = args[0]; + System.out.printf("Going to delete project \"%s\". Are you sure [y/N]: ", projectId); + Scanner scanner = new Scanner(System.in); + if (scanner.nextLine().toLowerCase().equals("y")) { + resourceManager.delete(projectId); + System.out.println("Successfully deleted project " + projectId + "."); + } else { + System.out.println("Will not delete project " + projectId + "."); + } + scanner.close(); + } + + @Override + public String[] getRequiredParams() { + return new String[] {"project-id"}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {}; + } + } + + private static class GetAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... args) { + String projectId = args[0]; + ProjectInfo project = resourceManager.get(projectId); + if (project != null) { + System.out.printf( + "Successfully got project '%s': %s.%n", projectId, projectDetails(project)); + } else { + System.out.printf("Could not find project '%s'.%n", projectId); + } + } + + @Override + public String[] getRequiredParams() { + return new String[] {"project-id"}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {}; + } + } + + private static class ListAction implements ResourceManagerAction { + @Override + public void run(ResourceManager resourceManager, String... 
args) { + System.out.println("Projects you can view:"); + for (ProjectInfo project : resourceManager.list().values()) { + System.out.println(projectDetails(project)); + } + } + + @Override + public String[] getRequiredParams() { + return new String[] {}; + } + + @Override + public String[] getOptionalParams() { + return new String[] {}; + } + } + + static { + ACTIONS.put("create", new CreateAction()); + ACTIONS.put("delete", new DeleteAction()); + ACTIONS.put("get", new GetAction()); + ACTIONS.put("list", new ListAction()); + } + + private static String projectDetails(ProjectInfo project) { + return new StringBuilder() + .append("{projectId:") + .append(project.projectId()) + .append(", projectNumber:") + .append(project.projectNumber()) + .append(", createTimeMillis:") + .append(project.createTimeMillis()) + .append(", state:") + .append(project.state()) + .append(", labels:") + .append(project.labels()) + .append("}") + .toString(); + } + + private static void addUsage( + String actionName, ResourceManagerAction action, StringBuilder usage) { + usage.append(actionName); + Joiner joiner = Joiner.on(" "); + String[] requiredParams = action.getRequiredParams(); + if (requiredParams.length > 0) { + usage.append(' '); + joiner.appendTo(usage, requiredParams); + } + String[] optionalParams = action.getOptionalParams(); + if (optionalParams.length > 0) { + usage.append(" ["); + joiner.appendTo(usage, optionalParams); + usage.append(']'); + } + } + + public static void main(String... args) { + String actionName = args.length > 0 ? args[0].toLowerCase() : DEFAULT_ACTION; + ResourceManagerAction action = ACTIONS.get(actionName); + if (action == null) { + StringBuilder actionAndParams = new StringBuilder(); + for (Map.Entry entry : ACTIONS.entrySet()) { + addUsage(entry.getKey(), entry.getValue(), actionAndParams); + actionAndParams.append('|'); + } + actionAndParams.setLength(actionAndParams.length() - 1); + System.out.printf( + "Usage: %s [%s]%n", ResourceManagerExample.class.getSimpleName(), actionAndParams); + return; + } + + // If you want to access a local Resource Manager emulator (after creating and starting the + // LocalResourceManagerHelper), use the following code instead: + // ResourceManager resourceManager = LocalResourceManagerHelper.options().service(); + ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); + args = args.length > 1 ? 
Arrays.copyOfRange(args, 1, args.length) : new String[] {}; + if (args.length < action.getRequiredParams().length) { + StringBuilder usage = new StringBuilder(); + usage.append("Usage: "); + addUsage(actionName, action, usage); + System.out.println(usage); + } else { + action.run(resourceManager, args); + } + } +} diff --git a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java index d47494b953fc..e3bee626f49c 100644 --- a/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java +++ b/gcloud-java-examples/src/main/java/com/google/gcloud/examples/StorageExample.java @@ -18,20 +18,19 @@ import com.google.gcloud.AuthCredentials; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; -import com.google.gcloud.RetryParams; +import com.google.gcloud.ReadChannel; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc.Tuple; import com.google.gcloud.storage.Blob; import com.google.gcloud.storage.BlobId; import com.google.gcloud.storage.BlobInfo; -import com.google.gcloud.storage.BlobReadChannel; -import com.google.gcloud.storage.BlobWriteChannel; import com.google.gcloud.storage.Bucket; import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.CopyWriter; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.Storage.ComposeRequest; import com.google.gcloud.storage.Storage.CopyRequest; import com.google.gcloud.storage.Storage.SignUrlOption; -import com.google.gcloud.storage.StorageFactory; import com.google.gcloud.storage.StorageOptions; import java.io.FileOutputStream; @@ -41,7 +40,6 @@ import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; -import static java.nio.charset.StandardCharsets.UTF_8; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; @@ -53,39 +51,45 @@ import java.security.cert.CertificateException; import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; /** - * An example of using the Google Cloud Storage. - *

An example of using Google Cloud Storage.

This example demonstrates a simple/typical storage usage.

Steps needed for running the example:
1. login using gcloud SDK - {@code gcloud auth login}.
2. compile using maven - {@code mvn compile}
3. run using maven -
   {@code mvn exec:java -Dexec.mainClass="com.google.gcloud.examples.StorageExample"
   -Dexec.args="[<project_id>]
   list [<bucket>] |
   info [<bucket> [<file>]] |
   download <bucket> <path> [local_file] |
   upload <local_file> <bucket> [<path>] |
   delete <bucket> <path>+ |
   cp <from_bucket> <from_path> <to_bucket> <to_path> |
   compose <bucket> <from_path>+ <to_path> |
   update_metadata <bucket> <path> [key=value]* |
   sign_url <service_account_private_key_file> <service_account_email> <bucket> <path>"}

A sketch of the example's channel-based upload pattern follows.

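Since this hunk swaps {@code BlobWriteChannel} for the generic {@code WriteChannel}, a minimal sketch of the chunked-upload pattern the upload action uses may help; the bucket name, object name, and local path below are placeholder assumptions:

```java
import com.google.gcloud.WriteChannel;
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class UploadSketch {
  public static void main(String... args) throws Exception {
    Storage storage = StorageOptions.builder().build().service();
    Path uploadFrom = Paths.get("local-file.txt"); // placeholder local path
    BlobInfo blobInfo = BlobInfo.builder("my-bucket", "my-object").build(); // placeholders
    Blob blob = new Blob(storage, blobInfo);
    // Write the file in 1KB chunks via the blob's channel writer,
    // which is recommended for large or unknown-size content.
    try (WriteChannel writer = blob.writer();
        InputStream input = Files.newInputStream(uploadFrom)) {
      byte[] buffer = new byte[1024];
      int limit;
      while ((limit = input.read(buffer)) >= 0) {
        writer.write(ByteBuffer.wrap(buffer, 0, limit));
      }
    }
  }
}
```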
      The first parameter is an optional {@code project_id} (logged-in project will be used if not + * supplied). Second parameter is a Storage operation (list, delete, compose,...) and can be used to + * demonstrate its usage. Any other arguments are specific to the operation. See each action's run + * method for the specific Storage interaction. */ public class StorageExample { private static final Map ACTIONS = new HashMap<>(); - private static abstract class StorageAction { + private abstract static class StorageAction { abstract void run(Storage storage, T request) throws Exception; @@ -96,7 +100,7 @@ protected String params() { } } - private static abstract class BlobsAction extends StorageAction { + private abstract static class BlobsAction extends StorageAction { @Override BlobId[] parse(String... args) { @@ -129,7 +133,7 @@ public void run(Storage storage, BlobId... blobIds) { if (blobIds.length == 1) { if (blobIds[0].name().isEmpty()) { // get Bucket - Bucket bucket = Bucket.load(storage, blobIds[0].bucket()); + Bucket bucket = Bucket.get(storage, blobIds[0].bucket()); if (bucket == null) { System.out.println("No such bucket"); return; @@ -137,7 +141,7 @@ public void run(Storage storage, BlobId... blobIds) { System.out.println("Bucket info: " + bucket.info()); } else { // get Blob - Blob blob = Blob.load(storage, blobIds[0]); + Blob blob = Blob.get(storage, blobIds[0]); if (blob == null) { System.out.println("No such object"); return; @@ -146,7 +150,7 @@ public void run(Storage storage, BlobId... blobIds) { } } else { // use batch to get multiple blobs. - List blobs = Blob.get(storage, blobIds); + List blobs = Blob.get(storage, Arrays.asList(blobIds)); for (Blob blob : blobs) { if (blob != null) { System.out.println(blob.info()); @@ -214,18 +218,20 @@ String parse(String... args) { public void run(Storage storage, String bucketName) { if (bucketName == null) { // list buckets - for (BucketInfo b : storage.list()) { - System.out.println(b); + Iterator bucketInfoIterator = storage.list().iterateAll(); + while (bucketInfoIterator.hasNext()) { + System.out.println(bucketInfoIterator.next()); } } else { // list a bucket's blobs - Bucket bucket = Bucket.load(storage, bucketName); + Bucket bucket = Bucket.get(storage, bucketName); if (bucket == null) { System.out.println("No such bucket"); return; } - for (Blob b : bucket.list()) { - System.out.println(b.info()); + Iterator blobIterator = bucket.list().iterateAll(); + while (blobIterator.hasNext()) { + System.out.println(blobIterator.next().info()); } } } @@ -252,7 +258,7 @@ private void run(Storage storage, Path uploadFrom, BlobInfo blobInfo) throws IOE // When content is not available or large (1MB or more) it is recommended // to write it in chunks via the blob's channel writer. Blob blob = new Blob(storage, blobInfo); - try (BlobWriteChannel writer = blob.writer()) { + try (WriteChannel writer = blob.writer()) { byte[] buffer = new byte[1024]; try (InputStream input = Files.newInputStream(uploadFrom)) { int limit; @@ -305,7 +311,7 @@ public void run(Storage storage, Tuple tuple) throws IOException { } private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOException { - Blob blob = Blob.load(storage, blobId); + Blob blob = Blob.get(storage, blobId); if (blob == null) { System.out.println("No such object"); return; @@ -320,7 +326,7 @@ private void run(Storage storage, BlobId blobId, Path downloadTo) throws IOExcep writeTo.write(content); } else { // When Blob size is big or unknown use the blob's channel reader. 
- try (BlobReadChannel reader = blob.reader()) { + try (ReadChannel reader = blob.reader()) { WritableByteChannel channel = Channels.newChannel(writeTo); ByteBuffer bytes = ByteBuffer.allocate(64 * 1024); while (reader.read(bytes) > 0) { @@ -368,8 +374,8 @@ public String params() { private static class CopyAction extends StorageAction { @Override public void run(Storage storage, CopyRequest request) { - BlobInfo copiedBlobInfo = storage.copy(request); - System.out.println("Copied " + copiedBlobInfo); + CopyWriter copyWriter = storage.copy(request); + System.out.println("Copied " + copyWriter.result()); } @Override @@ -377,7 +383,7 @@ CopyRequest parse(String... args) { if (args.length != 4) { throw new IllegalArgumentException(); } - return CopyRequest.of(args[0], args[1], BlobInfo.builder(args[2], args[3]).build()); + return CopyRequest.of(args[0], args[1], BlobId.of(args[2], args[3])); } @Override @@ -432,7 +438,7 @@ public void run(Storage storage, Tuple> tuple) } private void run(Storage storage, BlobId blobId, Map metadata) { - Blob blob = Blob.load(storage, blobId); + Blob blob = Blob.get(storage, blobId); if (blob == null) { System.out.println("No such object"); return; @@ -485,8 +491,8 @@ public void run(Storage storage, Tuple private void run(Storage storage, ServiceAccountAuthCredentials cred, BlobInfo blobInfo) throws IOException { Blob blob = new Blob(storage, blobInfo); - System.out.println("Signed URL: " + - blob.signUrl(1, TimeUnit.DAYS, SignUrlOption.serviceAccount(cred))); + System.out.println("Signed URL: " + + blob.signUrl(1, TimeUnit.DAYS, SignUrlOption.serviceAccount(cred))); } @Override @@ -521,7 +527,7 @@ public String params() { ACTIONS.put("sign_url", new SignUrlAction()); } - public static void printUsage() { + private static void printUsage() { StringBuilder actionAndParams = new StringBuilder(); for (Map.Entry entry : ACTIONS.entrySet()) { actionAndParams.append("\n\t").append(entry.getKey()); @@ -542,14 +548,16 @@ public static void main(String... args) throws Exception { printUsage(); return; } - StorageOptions.Builder optionsBuilder = - StorageOptions.builder().retryParams(RetryParams.getDefaultInstance()); + StorageOptions.Builder optionsBuilder = StorageOptions.builder(); StorageAction action; + String actionName; if (args.length >= 2 && !ACTIONS.containsKey(args[0])) { + actionName = args[1]; optionsBuilder.projectId(args[0]); action = ACTIONS.get(args[1]); args = Arrays.copyOfRange(args, 2, args.length); } else { + actionName = args[0]; action = ACTIONS.get(args[0]); args = Arrays.copyOfRange(args, 1, args.length); } @@ -558,12 +566,12 @@ public static void main(String... 
args) throws Exception { printUsage(); return; } - Storage storage = StorageFactory.instance().get(optionsBuilder.build()); + Storage storage = optionsBuilder.build().service(); Object request; try { request = action.parse(args); } catch (IllegalArgumentException ex) { - System.out.println("Invalid input for action '" + args[1] + "'"); + System.out.println("Invalid input for action '" + actionName + "'"); System.out.println("Expected: " + action.params()); return; } catch (Exception ex) { diff --git a/gcloud-java-resourcemanager/README.md b/gcloud-java-resourcemanager/README.md new file mode 100644 index 000000000000..d9a99e12b7a5 --- /dev/null +++ b/gcloud-java-resourcemanager/README.md @@ -0,0 +1,222 @@ +Google Cloud Java Client for Resource Manager (Alpha) +============================================= + +Java idiomatic client for [Google Cloud Resource Manager] (https://cloud.google.com/resource-manager/). + +[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) +[![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) +[![Maven](https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-resourcemanager.svg)]( https://img.shields.io/maven-central/v/com.google.gcloud/gcloud-java-resourcemanager.svg) + +- [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) +- [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html) + +> Note: This client is a work-in-progress, and may occasionally +> make backwards-incompatible changes. + +Quickstart +---------- +If you are using Maven, add this to your pom.xml file +```xml + + com.google.gcloud + gcloud-java-resourcemanager + 0.1.3 + +``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-resourcemanager:0.1.3' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-resourcemanager" % "0.1.3" +``` + +Example Application +-------------------- +[`ResourceManagerExample`](https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/gcloud-java-examples/src/main/java/com/google/gcloud/examples/ResourceManagerExample.java) is a simple command line interface for the Cloud Resource Manager. Read more about using the application on the [`gcloud-java-examples` docs page](http://googlecloudplatform.github.io/gcloud-java/apidocs/?com/google/gcloud/examples/ResourceManagerExample.html). + +Authentication +-------------- + +Unlike other `gcloud-java` service libraries, `gcloud-java-resourcemanager` only accepts Google Cloud SDK credentials at this time. If you are having trouble authenticating, it may be that you have other types of credentials that override your Google Cloud SDK credentials. See more about Google Cloud SDK credentials and credential precedence in the global README's [Authentication section](https://github.com/GoogleCloudPlatform/gcloud-java#authentication). + +About Google Cloud Resource Manager +----------------------------------- + +Google [Cloud Resource Manager][cloud-resourcemanager] provides a programmatic way to manage your Google Cloud Platform projects. With this API, you can do the following: + +* Get a list of all projects associated with an account. +* Create new projects. 
+* Update existing projects. +* Delete projects. +* Undelete projects that you don't want to delete. + +Google Cloud Resource Manager is currently in beta and may occasionally make backwards incompatible changes. + +Be sure to activate the Google Cloud Resource Manager API on the Developer's Console to use Resource Manager from your project. + +See the ``gcloud-java`` API [Resource Manager documentation][resourcemanager-api] to learn how to interact +with the Cloud Resource Manager using this client Library. + +Getting Started +--------------- +#### Prerequisites +You will need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following command in command line: `gcloud auth login`. + +> Note: You don't need a project ID to use this service. If you have a project ID set in the Google Cloud SDK, you can unset it by typing `gcloud config unset project` in command line. + +#### Installation and setup +You'll need to obtain the `gcloud-java-resourcemanager` library. See the [Quickstart](#quickstart) section to add `gcloud-java-resourcemanager` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud Resource Manager, you must create a service object with Google Cloud SDK credentials. You can then make API calls by calling methods on the Resource Manager service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). These credentials are automatically inferred from your environment, so you only need the following code to create your service object: + +```java +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); +``` + +#### Creating a project +All you need to create a project is a globally unique project ID. You can also optionally attach a non-unique name and labels to your project. Read more about naming guidelines for project IDs, names, and labels [here](https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects). To create a project, add the following import at the top of your file: + +```java +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ProjectInfo; +``` + +Then add the following code to create a project (be sure to change `myProjectId` to your own unique project ID). + +```java +String myProjectId = "my-globally-unique-project-id"; // Change to a unique project ID. +Project myProject = resourceManager.create(ProjectInfo.builder(myProjectId).build()); +``` + +Note that the return value from `create` is a `Project` that includes additional read-only information, like creation time, project number, and lifecycle state. Read more about these fields on the [Projects page](https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects). `Project`, a subclass of `ProjectInfo`, adds a layer of service-related functionality over `ProjectInfo`. + +#### Getting a specific project +You can load a project if you know it's project ID and have read permissions to the project. 
For example, to get the project we just created we can do the following: + +```java +Project projectFromServer = resourceManager.get(myProjectId); +``` + +#### Editing a project +To edit a project, create a new `ProjectInfo` object and pass it in to the `Project.replace` method. + +For example, to add a label for the newly created project to denote that it's launch status is "in development", add the following code: + +```java +Project newProject = myProject.toBuilder() + .addLabel("launch-status", "in-development") + .build() + .replace(); +``` + +Note that the values of the project you pass in to `replace` overwrite the server's values for non-read-only fields, namely `projectName` and `labels`. For example, if you create a project with `projectName` "some-project-name" and subsequently call replace using a `ProjectInfo` object that didn't set the `projectName`, then the server will unset the project's name. The server ignores any attempted changes to the read-only fields `projectNumber`, `lifecycleState`, and `createTime`. The `projectId` cannot change. + +#### Listing all projects +Suppose that we want a list of all projects for which we have read permissions. Add the following import: + +```java +import java.util.Iterator; +``` + +Then add the following code to print a list of projects you can view: + +```java +Iterator projectIterator = resourceManager.list().iterateAll(); +System.out.println("Projects I can view:"); +while (projectIterator.hasNext()) { + System.out.println(projectIterator.next().projectId()); +} +``` + +#### Complete source code + +Here we put together all the code shown above into one program. This program assumes that you are running from your own desktop and used the Google Cloud SDK to authenticate yourself. + +```java +import com.google.gcloud.resourcemanager.Project; +import com.google.gcloud.resourcemanager.ProjectInfo; +import com.google.gcloud.resourcemanager.ResourceManager; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.util.Iterator; + +public class GcloudJavaResourceManagerExample { + + public static void main(String[] args) { + // Create Resource Manager service object. + // By default, credentials are inferred from the runtime environment. + ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service(); + + // Create a project. + String myProjectId = "my-globally-unique-project-id"; // Change to a unique project ID. + Project myProject = resourceManager.create(ProjectInfo.builder(myProjectId).build()); + + // Get a project from the server. + Project projectFromServer = resourceManager.get(myProjectId); + System.out.println("Got project " + projectFromServer.projectId() + " from the server."); + + // Update a project + Project newProject = myProject.toBuilder() + .addLabel("launch-status", "in-development") + .build() + .replace(); + System.out.println("Updated the labels of project " + newProject.projectId() + + " to be " + newProject.labels()); + + // List all the projects you have permission to view. + Iterator projectIterator = resourceManager.list().iterateAll(); + System.out.println("Projects I can view:"); + while (projectIterator.hasNext()) { + System.out.println(projectIterator.next().projectId()); + } + } +} +``` + +Java Versions +------------- + +Java 7 or above is required for using this client. + +Versioning +---------- + +This library follows [Semantic Versioning] (http://semver.org/). 
+ +It is currently in major version zero (``0.y.z``), which means that anything +may change at any time and the public API should not be considered +stable. + +Testing +------- + +This library has tools to help write tests for code that uses Resource Manager. + +See [TESTING] to read more about testing. + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See [CONTRIBUTING] for more information on how to get started. + +License +------- + +Apache 2.0 - See [LICENSE] for more information. + + +[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE +[TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-resource-manager +[cloud-platform]: https://cloud.google.com/ +[cloud-resourcemanager]: https://cloud.google.com/resource-manager/docs +[resourcemanager-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html + diff --git a/gcloud-java-resourcemanager/pom.xml b/gcloud-java-resourcemanager/pom.xml new file mode 100644 index 000000000000..8fc6dd723eef --- /dev/null +++ b/gcloud-java-resourcemanager/pom.xml @@ -0,0 +1,50 @@ + + + 4.0.0 + com.google.gcloud + gcloud-java-resourcemanager + jar + GCloud Java resource manager + + Java idiomatic client for Google Cloud Resource Manager. + + + com.google.gcloud + gcloud-java-pom + 0.1.4-SNAPSHOT + + + gcloud-java-resourcemanager + + + + ${project.groupId} + gcloud-java-core + ${project.version} + + + com.google.apis + google-api-services-cloudresourcemanager + v1beta1-rev6-1.19.0 + compile + + + com.google.guava + guava-jdk5 + + + + + junit + junit + 4.12 + test + + + org.easymock + easymock + 3.3 + test + + + diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java new file mode 100644 index 000000000000..f48c057ba049 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Option.java @@ -0,0 +1,72 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.gcloud.spi.ResourceManagerRpc; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for Resource Manager operation options. 
+ */ +class Option implements Serializable { + + private static final long serialVersionUID = 2655177550880762967L; + + private final ResourceManagerRpc.Option rpcOption; + private final Object value; + + Option(ResourceManagerRpc.Option rpcOption, Object value) { + this.rpcOption = checkNotNull(rpcOption); + this.value = value; + } + + ResourceManagerRpc.Option rpcOption() { + return rpcOption; + } + + Object value() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Option)) { + return false; + } + Option other = (Option) obj; + return Objects.equals(rpcOption, other.rpcOption) + && Objects.equals(value, other.value); + } + + @Override + public int hashCode() { + return Objects.hash(rpcOption, value); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", rpcOption.value()) + .add("value", value) + .toString(); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java new file mode 100644 index 000000000000..f12a7ea50676 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/Project.java @@ -0,0 +1,243 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.Preconditions.checkNotNull; + +import java.io.IOException; +import java.io.ObjectInputStream; +import java.util.Map; +import java.util.Objects; + +/** + * A Google Cloud Resource Manager project object. + * + *

      A Project is a high-level Google Cloud Platform entity. It is a container for ACLs, APIs, + * AppEngine Apps, VMs, and other Google Cloud Platform resources. This class' member variables are + * immutable. Methods that change or update the underlying Project information return a new Project + * instance. {@code Project} adds a layer of service-related functionality over {@link ProjectInfo}. + */ +public class Project extends ProjectInfo { + + private static final long serialVersionUID = 6767630161335155133L; + + private final ResourceManagerOptions options; + private transient ResourceManager resourceManager; + + public static class Builder extends ProjectInfo.Builder { + private final ResourceManager resourceManager; + private ProjectInfo.BuilderImpl infoBuilder; + + Builder(ResourceManager resourceManager) { + this.resourceManager = resourceManager; + this.infoBuilder = new ProjectInfo.BuilderImpl(); + } + + Builder(Project project) { + this.resourceManager = project.resourceManager; + this.infoBuilder = new ProjectInfo.BuilderImpl(project); + } + + @Override + public Builder name(String name) { + infoBuilder.name(name); + return this; + } + + @Override + public Builder projectId(String projectId) { + infoBuilder.projectId(projectId); + return this; + } + + @Override + public Builder addLabel(String key, String value) { + infoBuilder.addLabel(key, value); + return this; + } + + @Override + public Builder removeLabel(String key) { + infoBuilder.removeLabel(key); + return this; + } + + @Override + public Builder clearLabels() { + infoBuilder.clearLabels(); + return this; + } + + @Override + public Builder labels(Map labels) { + infoBuilder.labels(labels); + return this; + } + + @Override + Builder projectNumber(Long projectNumber) { + infoBuilder.projectNumber(projectNumber); + return this; + } + + @Override + Builder state(State state) { + infoBuilder.state(state); + return this; + } + + @Override + Builder createTimeMillis(Long createTimeMillis) { + infoBuilder.createTimeMillis(createTimeMillis); + return this; + } + + @Override + Builder parent(ResourceId parent) { + infoBuilder.parent(parent); + return this; + } + + @Override + public Project build() { + return new Project(resourceManager, infoBuilder); + } + } + + Project(ResourceManager resourceManager, ProjectInfo.BuilderImpl infoBuilder) { + super(infoBuilder); + this.resourceManager = checkNotNull(resourceManager); + this.options = resourceManager.options(); + } + + /** + * Constructs a Project object that contains project information got from the server. + * + * @return Project object containing the project's metadata or {@code null} if not found + * @throws ResourceManagerException upon failure + */ + public static Project get(ResourceManager resourceManager, String projectId) { + return resourceManager.get(projectId); + } + + /** + * Returns the {@link ResourceManager} service object associated with this Project. + */ + public ResourceManager resourceManager() { + return resourceManager; + } + + /** + * Fetches the current project's latest information. Returns {@code null} if the job does not + * exist. + * + * @return Project containing the project's updated metadata or {@code null} if not found + * @throws ResourceManagerException upon failure + */ + public Project reload() { + return Project.get(resourceManager, projectId()); + } + + /** + * Marks the project identified by the specified project ID for deletion. + * + *

This method will only affect the project if the following criteria are met:
• The project does not have a billing account associated with it.
• The project has a lifecycle state of {@link ProjectInfo.State#ACTIVE}.

A sketch of the resulting delete/undelete round trip is shown below.
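A minimal sketch of that round trip, assuming a placeholder project ID and a caller with modify permissions:

```java
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ProjectInfo;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class DeleteUndeleteSketch {
  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    String projectId = "some-project-id"; // placeholder
    resourceManager.delete(projectId); // lifecycle state becomes DELETE_REQUESTED
    Project pending = resourceManager.get(projectId);
    if (pending != null && pending.state() == ProjectInfo.State.DELETE_REQUESTED) {
      // Deletion has not started yet, so the project can still be restored.
      resourceManager.undelete(projectId);
    }
  }
}
```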
+ * This method changes the project's lifecycle state from {@link ProjectInfo.State#ACTIVE} to + * {@link ProjectInfo.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at + * which point the lifecycle state changes to {@link ProjectInfo.State#DELETE_IN_PROGRESS}. Until + * the deletion completes, you can check the lifecycle state by retrieving the project + * with {@link ResourceManager#get}, and the project remains visible to + * {@link ResourceManager#list}. However, you cannot update the project. After the deletion + * completes, the project is not retrievable by the {@link ResourceManager#get} and + * {@link ResourceManager#list} methods. The caller must have modify permissions for this project. + * + * @see Cloud + * Resource Manager delete + * @throws ResourceManagerException upon failure + */ + public void delete() { + resourceManager.delete(projectId()); + } + + /** + * Restores the project identified by the specified project ID. + * + *

      You can only use this method for a project that has a lifecycle state of + * {@link ProjectInfo.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle + * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The + * caller must have modify permissions for this project. + * + * @see Cloud + * Resource Manager undelete + * @throws ResourceManagerException upon failure (including when the project can't be restored) + */ + public void undelete() { + resourceManager.undelete(projectId()); + } + + /** + * Replaces the attributes of the project with the attributes of this project. + * + *

      The caller must have modify permissions for this project. + * + * @see Cloud + * Resource Manager update + * @return the Project representing the new project metadata + * @throws ResourceManagerException upon failure + */ + public Project replace() { + return resourceManager.replace(this); + } + + static Builder builder(ResourceManager resourceManager, String projectId) { + return new Builder(resourceManager).projectId(projectId); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Project && Objects.equals(toPb(), ((Project) obj).toPb()) + && Objects.equals(options, ((Project) obj).options); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), options); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.resourceManager = options.service(); + } + + static Project fromPb(ResourceManager resourceManager, + com.google.api.services.cloudresourcemanager.model.Project answer) { + ProjectInfo info = ProjectInfo.fromPb(answer); + return new Project(resourceManager, new ProjectInfo.BuilderImpl(info)); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java new file mode 100644 index 000000000000..7553a207cd29 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ProjectInfo.java @@ -0,0 +1,389 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Maps; + +import org.joda.time.DateTime; +import org.joda.time.format.ISODateTimeFormat; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +/** + * A Google Cloud Resource Manager project metadata object. + * A Project is a high-level Google Cloud Platform entity. It is a container for ACLs, APIs, + * AppEngine Apps, VMs, and other Google Cloud Platform resources. + */ +public class ProjectInfo implements Serializable { + + private static final long serialVersionUID = 9148970963697734236L; + private final String name; + private final String projectId; + private final Map labels; + private final Long projectNumber; + private final State state; + private final Long createTimeMillis; + private final ResourceId parent; + + /** + * The project lifecycle states. + */ + public enum State { + /** + * Only used/useful for distinguishing unset values. + */ + LIFECYCLE_STATE_UNSPECIFIED, + + /** + * The normal and active state. 
+ */ + ACTIVE, + + /** + * The project has been marked for deletion by the user or by the system (Google Cloud + * Platform). This can generally be reversed by calling {@link ResourceManager#undelete}. + */ + DELETE_REQUESTED, + + /** + * The process of deleting the project has begun. Reversing the deletion is no longer possible. + */ + DELETE_IN_PROGRESS + } + + static class ResourceId implements Serializable { + + private static final long serialVersionUID = -325199985993344726L; + + private final String id; + private final String type; + + ResourceId(String id, String type) { + this.id = checkNotNull(id); + this.type = checkNotNull(type); + } + + String id() { + return id; + } + + String type() { + return type; + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ResourceId && Objects.equals(toPb(), ((ResourceId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(id, type); + } + + com.google.api.services.cloudresourcemanager.model.ResourceId toPb() { + com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb = + new com.google.api.services.cloudresourcemanager.model.ResourceId(); + resourceIdPb.setId(id); + resourceIdPb.setType(type.toLowerCase()); + return resourceIdPb; + } + + static ResourceId fromPb( + com.google.api.services.cloudresourcemanager.model.ResourceId resourceIdPb) { + return new ResourceId(resourceIdPb.getId(), resourceIdPb.getType()); + } + } + + public static abstract class Builder { + + /** + * Set the user-assigned name of the project. + * + *

      This field is optional and can remain unset. Allowed characters are: lowercase and + * uppercase letters, numbers, hyphen, single-quote, double-quote, space, and exclamation point. + * This field can be changed after project creation. + */ + public abstract Builder name(String name); + + /** + * Set the unique, user-assigned ID of the project. + * + *

      The ID must be 6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. + * Trailing hyphens are prohibited. This field cannot be changed after the server creates the + * project. + */ + public abstract Builder projectId(String projectId); + + /** + * Add a label associated with this project. + * + *

      See {@link #labels} for label restrictions. + */ + public abstract Builder addLabel(String key, String value); + + /** + * Remove a label associated with this project. + */ + public abstract Builder removeLabel(String key); + + /** + * Clear the labels associated with this project. + */ + public abstract Builder clearLabels(); + + /** + * Set the labels associated with this project. + * + *

      Label keys must be between 1 and 63 characters long and must conform to the following + * regular expression: [a-z]([-a-z0-9]*[a-z0-9])?. Label values must be between 0 and 63 + * characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?. No + * more than 256 labels can be associated with a given resource. This field can be changed after + * project creation. + */ + public abstract Builder labels(Map labels); + + abstract Builder projectNumber(Long projectNumber); + + abstract Builder state(State state); + + abstract Builder createTimeMillis(Long createTimeMillis); + + abstract Builder parent(ResourceId parent); + + public abstract ProjectInfo build(); + } + + static class BuilderImpl extends Builder { + + private String name; + private String projectId; + private Map labels = new HashMap<>(); + private Long projectNumber; + private State state; + private Long createTimeMillis; + private ResourceId parent; + + BuilderImpl() {} + + BuilderImpl(ProjectInfo info) { + this.name = info.name; + this.projectId = info.projectId; + this.labels.putAll(info.labels); + this.projectNumber = info.projectNumber; + this.state = info.state; + this.createTimeMillis = info.createTimeMillis; + this.parent = info.parent; + } + + @Override + public Builder name(String name) { + this.name = firstNonNull(name, Data.nullOf(String.class)); + return this; + } + + @Override + public Builder projectId(String projectId) { + this.projectId = checkNotNull(projectId); + return this; + } + + @Override + public Builder addLabel(String key, String value) { + this.labels.put(key, value); + return this; + } + + @Override + public Builder removeLabel(String key) { + this.labels.remove(key); + return this; + } + + @Override + public Builder clearLabels() { + this.labels.clear(); + return this; + } + + @Override + public Builder labels(Map labels) { + this.labels = Maps.newHashMap(checkNotNull(labels)); + return this; + } + + @Override + Builder projectNumber(Long projectNumber) { + this.projectNumber = projectNumber; + return this; + } + + @Override + Builder state(State state) { + this.state = state; + return this; + } + + @Override + Builder createTimeMillis(Long createTimeMillis) { + this.createTimeMillis = createTimeMillis; + return this; + } + + @Override + Builder parent(ResourceId parent) { + this.parent = parent; + return this; + } + + @Override + public ProjectInfo build() { + return new ProjectInfo(this); + } + } + + ProjectInfo(BuilderImpl builder) { + this.name = builder.name; + this.projectId = builder.projectId; + this.labels = ImmutableMap.copyOf(builder.labels); + this.projectNumber = builder.projectNumber; + this.state = builder.state; + this.createTimeMillis = builder.createTimeMillis; + this.parent = builder.parent; + } + + /** + * Get the unique, user-assigned ID of the project. + * + *

      This field cannot be changed after the server creates the project. + */ + public String projectId() { + return projectId; + } + + /** + * Get the user-assigned name of the project. + * + *

      This field is optional, can remain unset, and can be changed after project creation. + */ + public String name() { + return Data.isNull(name) ? null : name; + } + + /** + * Get number uniquely identifying the project. + * + *

      This field is set by the server and is read-only. + */ + public Long projectNumber() { + return projectNumber; + } + + /** + * Get the immutable map of labels associated with this project. + */ + public Map labels() { + return labels; + } + + /** + * Get the project's lifecycle state. + * + *

      This is a read-only field. To change the lifecycle state of your project, use the + * {@code delete} or {@code undelete} method. + */ + public State state() { + return state; + } + + ResourceId parent() { + return parent; + } + + /** + * Get the project's creation time (in milliseconds). + * + *

      This field is set by the server and is read-only. + */ + public Long createTimeMillis() { + return createTimeMillis; + } + + @Override + public boolean equals(Object obj) { + return obj.getClass().equals(ProjectInfo.class) + && Objects.equals(toPb(), ((ProjectInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(name, projectId, labels, projectNumber, state, createTimeMillis, parent); + } + + public static Builder builder(String id) { + return new BuilderImpl().projectId(id); + } + + public Builder toBuilder() { + return new BuilderImpl(this); + } + + com.google.api.services.cloudresourcemanager.model.Project toPb() { + com.google.api.services.cloudresourcemanager.model.Project projectPb = + new com.google.api.services.cloudresourcemanager.model.Project(); + projectPb.setName(name); + projectPb.setProjectId(projectId); + projectPb.setLabels(labels); + projectPb.setProjectNumber(projectNumber); + if (state != null) { + projectPb.setLifecycleState(state.toString()); + } + if (createTimeMillis != null) { + projectPb.setCreateTime(ISODateTimeFormat.dateTime().withZoneUTC().print(createTimeMillis)); + } + if (parent != null) { + projectPb.setParent(parent.toPb()); + } + return projectPb; + } + + static ProjectInfo fromPb(com.google.api.services.cloudresourcemanager.model.Project projectPb) { + Builder builder = builder(projectPb.getProjectId()).projectNumber(projectPb.getProjectNumber()); + if (projectPb.getName() != null && !projectPb.getName().equals("Unnamed")) { + builder.name(projectPb.getName()); + } + if (projectPb.getLabels() != null) { + builder.labels(projectPb.getLabels()); + } + if (projectPb.getLifecycleState() != null) { + builder.state(State.valueOf(projectPb.getLifecycleState())); + } + if (projectPb.getCreateTime() != null) { + builder.createTimeMillis(DateTime.parse(projectPb.getCreateTime()).getMillis()); + } + if (projectPb.getParent() != null) { + builder.parent(ResourceId.fromPb(projectPb.getParent())); + } + return builder.build(); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java new file mode 100644 index 000000000000..af772dce6b60 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java @@ -0,0 +1,269 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.base.Joiner; +import com.google.common.collect.Sets; +import com.google.gcloud.Page; +import com.google.gcloud.Service; +import com.google.gcloud.spi.ResourceManagerRpc; + +import java.util.Set; + +/** + * An interface for Google Cloud Resource Manager. 
+ * + * @see Google Cloud Resource Manager + */ +public interface ResourceManager extends Service { + + String DEFAULT_CONTENT_TYPE = "application/octet-stream"; + + /** + * The fields of a project. + * + *

      These values can be used to specify the fields to include in a partial response when calling + * {@link ResourceManager#get} or {@link ResourceManager#list}. Project ID is always returned, + * even if not specified. + */ + enum ProjectField { + PROJECT_ID("projectId"), + NAME("name"), + LABELS("labels"), + PROJECT_NUMBER("projectNumber"), + STATE("lifecycleState"), + CREATE_TIME("createTime"); + + private final String selector; + + ProjectField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(ProjectField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1); + fieldStrings.add(PROJECT_ID.selector()); + for (ProjectField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Class for specifying project get options. + */ + class ProjectGetOption extends Option { + + private static final long serialVersionUID = 270185129961146874L; + + private ProjectGetOption(ResourceManagerRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the project's fields to be returned by the RPC call. + * + *

      If this option is not provided all project fields are returned. + * {@code ProjectGetOption.fields} can be used to specify only the fields of interest. Project + * ID is always returned, even if not specified. {@link ProjectField} provides a list of fields + * that can be used. + */ + public static ProjectGetOption fields(ProjectField... fields) { + return new ProjectGetOption(ResourceManagerRpc.Option.FIELDS, ProjectField.selector(fields)); + } + } + + /** + * Class for specifying project list options. + */ + class ProjectListOption extends Option { + + private static final long serialVersionUID = 7888768979702012328L; + + private ProjectListOption(ResourceManagerRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify a filter. + * + *

Filter rules are case insensitive. The fields eligible for filtering are:
• name
• project ID
• labels.key, where key is the name of a label

You can specify multiple filters by adding a space between each filter. Multiple filters are composed using "and".

Some examples of filters:
• name:* The project has a name.
• name:Howl The project's name is Howl or howl.
• name:HOWL Equivalent to the above.
• NAME:howl Equivalent to the above.
• labels.color:* The project has the label color.
• labels.color:red The project's label color has the value red.
• labels.color:red label.size:big The project's label color has the value red and its label size has the value big.

A usage sketch follows this list.
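A hedged sketch of combining such a filter with a partial response; the label key and value are placeholders:

```java
import com.google.gcloud.Page;
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManager.ProjectField;
import com.google.gcloud.resourcemanager.ResourceManager.ProjectListOption;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

import java.util.Iterator;

public class FilterSketch {
  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    // List projects labeled color=red, fetching only name and labels.
    Page<Project> page = resourceManager.list(
        ProjectListOption.filter("labels.color:red"),
        ProjectListOption.fields(ProjectField.NAME, ProjectField.LABELS));
    Iterator<Project> it = page.iterateAll();
    while (it.hasNext()) {
      System.out.println(it.next().projectId());
    }
  }
}
```

Because the project ID is always included in the response, it is safe to print even with a restricted field list.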
      + */ + public static ProjectListOption filter(String filter) { + return new ProjectListOption(ResourceManagerRpc.Option.FILTER, filter); + } + + /** + * Returns an option to specify a page token. + * + *

      The page token (returned from a previous call to list) indicates from where listing should + * continue. + */ + public static ProjectListOption pageToken(String pageToken) { + return new ProjectListOption(ResourceManagerRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * The maximum number of projects to return per RPC. + * + *

      The server can return fewer projects than requested. When there are more results than the + * page size, the server will return a page token that can be used to fetch other results. + * Note: pagination is not yet supported; the server currently ignores this field and returns + * all results. + */ + public static ProjectListOption pageSize(int pageSize) { + return new ProjectListOption(ResourceManagerRpc.Option.PAGE_SIZE, pageSize); + } + + /** + * Returns an option to specify the project's fields to be returned by the RPC call. + * + *

      If this option is not provided all project fields are returned. + * {@code ProjectListOption.fields} can be used to specify only the fields of interest. Project + * ID is always returned, even if not specified. {@link ProjectField} provides a list of fields + * that can be used. + */ + public static ProjectListOption fields(ProjectField... fields) { + StringBuilder builder = new StringBuilder(); + builder.append("projects(").append(ProjectField.selector(fields)).append(")"); + return new ProjectListOption(ResourceManagerRpc.Option.FIELDS, builder.toString()); + } + } + + /** + * Create a new project. + * + *

      Initially, the project resource is owned by its creator exclusively. The creator can later + * grant permission to others to read or update the project. Several APIs are activated + * automatically for the project, including Google Cloud Storage. + * + * @see Cloud + * Resource Manager create + * @return Project object representing the new project's metadata. The returned object will + * include the following read-only fields supplied by the server: project number, lifecycle + * state, and creation time. + * @throws ResourceManagerException upon failure + */ + Project create(ProjectInfo project); + + /** + * Marks the project identified by the specified project ID for deletion. + * + *

This method will only affect the project if the following criteria are met:
• The project does not have a billing account associated with it.
• The project has a lifecycle state of {@link ProjectInfo.State#ACTIVE}.
+ * This method changes the project's lifecycle state from {@link ProjectInfo.State#ACTIVE} to + * {@link ProjectInfo.State#DELETE_REQUESTED}. The deletion starts at an unspecified time, at + * which point the lifecycle state changes to {@link ProjectInfo.State#DELETE_IN_PROGRESS}. Until + * the deletion completes, you can check the lifecycle state by retrieving the project + * with {@link ResourceManager#get}, and the project remains visible to + * {@link ResourceManager#list}. However, you cannot update the project. After the deletion + * completes, the project is not retrievable by the {@link ResourceManager#get} and + * {@link ResourceManager#list} methods. The caller must have modify permissions for this project. + * + * @see Cloud + * Resource Manager delete + * @throws ResourceManagerException upon failure + */ + void delete(String projectId); + + /** + * Retrieves the project identified by the specified project ID. + * + *

      Returns {@code null} if the project is not found or if the user doesn't have read + * permissions for the project. + * + * @see Cloud + * Resource Manager get + * @throws ResourceManagerException upon failure + */ + Project get(String projectId, ProjectGetOption... options); + + /** + * Lists the projects visible to the current user. + * + *
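A partial-response lookup sketch for the method documented here; it assumes `ProjectGetOption` offers a `fields(...)` factory mirroring `ProjectListOption`'s, that `ProjectField` defines `NAME` and `LABELS` constants, and that `name()` is an accessor on the returned project.

```java
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManager.ProjectField;
import com.google.gcloud.resourcemanager.ResourceManager.ProjectGetOption;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class GetProjectSketch {
  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    // Ask the server for only the name and labels; the project ID is always returned.
    Project project = resourceManager.get("my-globally-unique-project-id",
        ProjectGetOption.fields(ProjectField.NAME, ProjectField.LABELS)); // assumed factory
    if (project == null) {
      // null means not found or no read permission, as documented above.
      System.out.println("Project not found or not readable.");
    } else {
      System.out.println(project.projectId() + ": " + project.name()); // name() assumed
    }
  }
}
```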

      This method returns projects in an unspecified order. New projects do not necessarily appear + * at the end of the list. Use {@link ProjectListOption} to filter this list, set page size, and + * set page tokens. Note that pagination is currently not implemented by the Cloud Resource + * Manager API. + * + * @see Cloud + * Resource Manager list + * @return {@code Page}, a page of projects + * @throws ResourceManagerException upon failure + */ + Page list(ProjectListOption... options); + + /** + * Replaces the attributes of the project. + * + *
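Combining the list options above into one call: this sketch leans on the `iterateAll()` usage shown in this diff's package-info example, with an assumed label-filter expression and assumed `ProjectField` constants.

```java
import java.util.Iterator;

import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManager.ProjectField;
import com.google.gcloud.resourcemanager.ResourceManager.ProjectListOption;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class ListProjectsSketch {
  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    Iterator<Project> projectIterator = resourceManager.list(
        ProjectListOption.filter("labels.env:prod"), // assumed filter expression
        ProjectListOption.fields(ProjectField.PROJECT_ID, ProjectField.NAME))
        .iterateAll();
    while (projectIterator.hasNext()) {
      System.out.println(projectIterator.next().projectId());
    }
  }
}
```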

      The caller must have modify permissions for this project. + * + * @see Cloud + * Resource Manager update + * @return the Project representing the new project metadata + * @throws ResourceManagerException upon failure + */ + Project replace(ProjectInfo newProject); + + /** + * Restores the project identified by the specified project ID. + * + *
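The replace flow, sketched from the `toBuilder()`/`replace()` chain in this diff's package-info example; `labels()` is an assumed accessor.

```java
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class ReplaceProjectSketch {
  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    Project project = resourceManager.get("my-globally-unique-project-id"); // placeholder ID
    // Build a modified copy and push it back; the server replaces the stored attributes.
    Project updated = project.toBuilder()
        .addLabel("launch-status", "in-development")
        .build()
        .replace();
    System.out.println("Labels now: " + updated.labels()); // assumed accessor
  }
}
```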

      You can only use this method for a project that has a lifecycle state of + * {@link ProjectInfo.State#DELETE_REQUESTED}. After deletion starts, as indicated by a lifecycle + * state of {@link ProjectInfo.State#DELETE_IN_PROGRESS}, the project cannot be restored. The + * caller must have modify permissions for this project. + * + * @see Cloud + * Resource Manager undelete + * @throws ResourceManagerException upon failure + */ + void undelete(String projectId); +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java new file mode 100644 index 000000000000..32a2998791c9 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerException.java @@ -0,0 +1,75 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.RetryHelper.RetryInterruptedException; + +import java.io.IOException; +import java.util.Set; + +/** + * Resource Manager service exception. + * + * @see Google Cloud + * Resource Manager error codes + */ +public class ResourceManagerException extends BaseServiceException { + + // see https://cloud.google.com/resource-manager/v1/errors/core_errors + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(503, null), + new Error(500, null), + new Error(429, null), + new Error(403, "concurrentLimitExceeded"), + new Error(403, "limitExceeded"), + new Error(403, "rateLimitExceeded"), + new Error(403, "rateLimitExceededUnreg"), + new Error(403, "servingLimitExceeded"), + new Error(403, "userRateLimitExceeded"), + new Error(403, "userRateLimitExceededUnreg"), + new Error(403, "variableTermLimitExceeded")); + private static final long serialVersionUID = -9207194488966554136L; + + public ResourceManagerException(int code, String message) { + super(code, message, null, true); + } + + public ResourceManagerException(IOException exception) { + super(exception, true); + } + + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; + } + + /** + * Translate RetryHelperException to the ResourceManagerException that caused the error. This + * method will always throw an exception. 
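On the caller's side, these exceptions surface with an HTTP-style `code()`, as the tests later in this diff assert; a hedged handling sketch using only calls that appear elsewhere in this change:

```java
import com.google.gcloud.resourcemanager.Project;
import com.google.gcloud.resourcemanager.ProjectInfo;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManagerException;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class ErrorHandlingSketch {
  public static void main(String... args) {
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    try {
      Project project =
          resourceManager.create(ProjectInfo.builder("my-globally-unique-project-id").build());
      System.out.println("Created " + project.projectId());
    } catch (ResourceManagerException e) {
      // 409 signals a taken project ID, matching the ALREADY_EXISTS error in this diff.
      if (e.code() == 409) {
        System.out.println("Project ID already in use: " + e.getMessage());
      } else {
        throw e;
      }
    }
  }
}
```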
+ * + * @throws ResourceManagerException when {@code ex} was caused by a {@code + * ResourceManagerException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} + */ + static ResourceManagerException translateAndThrow(RetryHelperException ex) { + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new ResourceManagerException(UNKNOWN_CODE, ex.getMessage()); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java new file mode 100644 index 000000000000..256fc321e4e1 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerFactory.java @@ -0,0 +1,25 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import com.google.gcloud.ServiceFactory; + +/** + * An interface for ResourceManager factories. + */ +public interface ResourceManagerFactory + extends ServiceFactory {} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java new file mode 100644 index 000000000000..e087caab5966 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerImpl.java @@ -0,0 +1,191 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Maps; +import com.google.gcloud.BaseService; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.PageImpl.NextPageFetcher; +import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.spi.ResourceManagerRpc; +import com.google.gcloud.spi.ResourceManagerRpc.Tuple; + +import java.util.Map; +import java.util.concurrent.Callable; + +final class ResourceManagerImpl + extends BaseService implements ResourceManager { + + private final ResourceManagerRpc resourceManagerRpc; + + ResourceManagerImpl(ResourceManagerOptions options) { + super(options); + resourceManagerRpc = options.rpc(); + } + + @Override + public Project create(final ProjectInfo project) { + try { + return Project.fromPb(this, runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Project call() { + return resourceManagerRpc.create(project.toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelperException e) { + throw ResourceManagerException.translateAndThrow(e); + } + } + + @Override + public void delete(final String projectId) { + try { + runWithRetries(new Callable() { + @Override + public Void call() { + resourceManagerRpc.delete(projectId); + return null; + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelperException e) { + throw ResourceManagerException.translateAndThrow(e); + } + } + + @Override + public Project get(final String projectId, ProjectGetOption... options) { + final Map optionsMap = optionMap(options); + try { + com.google.api.services.cloudresourcemanager.model.Project answer = runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Project call() { + return resourceManagerRpc.get(projectId, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : Project.fromPb(this, answer); + } catch (RetryHelperException e) { + throw ResourceManagerException.translateAndThrow(e); + } + } + + private static class ProjectPageFetcher implements NextPageFetcher { + + private static final long serialVersionUID = 2158209410430566961L; + private final Map requestOptions; + private final ResourceManagerOptions serviceOptions; + + ProjectPageFetcher(ResourceManagerOptions serviceOptions, String cursor, + Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(ResourceManagerRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; + } + + @Override + public Page nextPage() { + return listProjects(serviceOptions, requestOptions); + } + } + + @Override + public Page list(ProjectListOption... 
options) { + return listProjects(options(), optionMap(options)); + } + + private static Page listProjects(final ResourceManagerOptions serviceOptions, + final Map optionsMap) { + try { + Tuple> result = + runWithRetries(new Callable>>() { + @Override + public Tuple> call() { + return serviceOptions.rpc().list(optionsMap); + } + }, + serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable projects = + result.y() == null + ? ImmutableList.of() : Iterables.transform( + result.y(), + new Function() { + @Override + public Project apply( + com.google.api.services.cloudresourcemanager.model.Project projectPb) { + return new Project( + serviceOptions.service(), new ProjectInfo.BuilderImpl(ProjectInfo.fromPb(projectPb))); + } + }); + return new PageImpl<>( + new ProjectPageFetcher(serviceOptions, cursor, optionsMap), cursor, projects); + } catch (RetryHelperException e) { + throw ResourceManagerException.translateAndThrow(e); + } + } + + @Override + public Project replace(final ProjectInfo newProject) { + try { + return Project.fromPb(this, runWithRetries( + new Callable() { + @Override + public com.google.api.services.cloudresourcemanager.model.Project call() { + return resourceManagerRpc.replace(newProject.toPb()); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelperException e) { + throw ResourceManagerException.translateAndThrow(e); + } + } + + @Override + public void undelete(final String projectId) { + try { + runWithRetries(new Callable() { + @Override + public Void call() { + resourceManagerRpc.undelete(projectId); + return null; + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelperException e) { + throw ResourceManagerException.translateAndThrow(e); + } + } + + private Map optionMap(Option... options) { + Map temp = Maps.newEnumMap(ResourceManagerRpc.Option.class); + for (Option option : options) { + Object prev = temp.put(option.rpcOption(), option.value()); + checkArgument(prev == null, "Duplicate option %s", option); + } + return ImmutableMap.copyOf(temp); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java new file mode 100644 index 000000000000..5c0c4baf1ecb --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManagerOptions.java @@ -0,0 +1,123 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.ServiceOptions; +import com.google.gcloud.spi.DefaultResourceManagerRpc; +import com.google.gcloud.spi.ResourceManagerRpc; +import com.google.gcloud.spi.ResourceManagerRpcFactory; + +import java.util.Set; + +public class ResourceManagerOptions + extends ServiceOptions { + + private static final long serialVersionUID = 538303101192527452L; + private static final String GCRM_SCOPE = "https://www.googleapis.com/auth/cloud-platform"; + private static final Set SCOPES = ImmutableSet.of(GCRM_SCOPE); + private static final String DEFAULT_HOST = "https://cloudresourcemanager.googleapis.com"; + + public static class DefaultResourceManagerFactory implements ResourceManagerFactory { + private static final ResourceManagerFactory INSTANCE = new DefaultResourceManagerFactory(); + + @Override + public ResourceManager create(ResourceManagerOptions options) { + return new ResourceManagerImpl(options); + } + } + + /** + * Returns a default {@code ResourceManagerOptions} instance. + */ + public static ResourceManagerOptions defaultInstance() { + return builder().build(); + } + + public static class DefaultResourceManagerRpcFactory implements ResourceManagerRpcFactory { + private static final ResourceManagerRpcFactory INSTANCE = + new DefaultResourceManagerRpcFactory(); + + @Override + public ResourceManagerRpc create(ResourceManagerOptions options) { + return new DefaultResourceManagerRpc(options); + } + } + + @Override + protected String defaultHost() { + return DEFAULT_HOST; + } + + public static class Builder extends ServiceOptions.Builder { + + private Builder() {} + + private Builder(ResourceManagerOptions options) { + super(options); + } + + @Override + public ResourceManagerOptions build() { + return new ResourceManagerOptions(this); + } + } + + private ResourceManagerOptions(Builder builder) { + super(ResourceManagerFactory.class, ResourceManagerRpcFactory.class, builder); + } + + @Override + protected boolean projectIdRequired() { + return false; + } + + @Override + protected ResourceManagerFactory defaultServiceFactory() { + return DefaultResourceManagerFactory.INSTANCE; + } + + @Override + protected ResourceManagerRpcFactory defaultRpcFactory() { + return DefaultResourceManagerRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return SCOPES; + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ResourceManagerOptions && baseEquals((ResourceManagerOptions) obj); + } + + @Override + public int hashCode() { + return baseHashCode(); + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + public static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java new file mode 100644 index 000000000000..22a81499eb7a --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/package-info.java @@ -0,0 +1,42 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A client to Google Cloud Resource Manager. + * + *

Here's a simple usage example of gcloud-java-resourcemanager: + *

+ * <pre> {@code
      + * ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
      + * String myProjectId = "my-globally-unique-project-id"; // Change to a unique project ID.
      + * Project myProject = resourceManager.create(ProjectInfo.builder(myProjectId).build());
      + * Project newProject = myProject.toBuilder()
      + *     .addLabel("launch-status", "in-development")
      + *     .build()
      + *     .replace();
      + * Iterator projectIterator = resourceManager.list().iterateAll();
      + * System.out.println("Projects I can view:");
      + * while (projectIterator.hasNext()) {
      + *   System.out.println(projectIterator.next().projectId());
+ * }}</pre>
      + * + *

      Remember that you must authenticate using the Google Cloud SDK. See more about + * providing + * credentials here. + * + * @see Google Cloud Resource Manager + */ + +package com.google.gcloud.resourcemanager; diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java new file mode 100644 index 000000000000..25c763276b3b --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/LocalResourceManagerHelper.java @@ -0,0 +1,570 @@ +package com.google.gcloud.resourcemanager.testing; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; +import static java.net.HttpURLConnection.HTTP_OK; + +import com.google.api.client.json.JsonFactory; +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.common.base.Joiner; +import com.google.common.base.Objects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.io.ByteStreams; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import org.joda.time.format.ISODateTimeFormat; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.zip.GZIPInputStream; + +/** + * Utility to create a local Resource Manager mock for testing. + * + *
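One way the helper might be wired into a JUnit test, sketched from the `create()`/`start()`/`options()`/`stop()` methods defined below; the test class name and project ID are illustrative.

```java
import static org.junit.Assert.assertNotNull;

import com.google.gcloud.resourcemanager.ProjectInfo;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class MyResourceManagerTest { // illustrative test class

  private static final LocalResourceManagerHelper HELPER = LocalResourceManagerHelper.create();

  @BeforeClass
  public static void beforeClass() {
    HELPER.start(); // boots the in-process mock server on an ephemeral port
  }

  @AfterClass
  public static void afterClass() {
    HELPER.stop();
  }

  @Test
  public void testCreateAgainstMock() {
    // options() points the client at http://localhost:<port> instead of production.
    ResourceManager resourceManager = HELPER.options().service();
    assertNotNull(resourceManager.create(ProjectInfo.builder("fake-project-id").build()));
  }
}
```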

      The mock runs in a separate thread, listening for HTTP requests on the local machine at an + * ephemeral port. + */ +@SuppressWarnings("restriction") +public class LocalResourceManagerHelper { + private static final Logger log = Logger.getLogger(LocalResourceManagerHelper.class.getName()); + private static final JsonFactory jsonFactory = + new com.google.api.client.json.jackson.JacksonFactory(); + private static final Random PROJECT_NUMBER_GENERATOR = new Random(); + private static final String VERSION = "v1beta1"; + private static final String CONTEXT = "/" + VERSION + "/projects"; + private static final URI BASE_CONTEXT; + private static final Set SUPPORTED_COMPRESSION_ENCODINGS = + ImmutableSet.of("gzip", "x-gzip"); + + static { + try { + BASE_CONTEXT = new URI(CONTEXT); + } catch (URISyntaxException e) { + throw new RuntimeException( + "Could not initialize LocalResourceManagerHelper due to URISyntaxException.", e); + } + } + + // see https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects + private static final Set PERMISSIBLE_PROJECT_NAME_PUNCTUATION = + ImmutableSet.of('-', '\'', '"', ' ', '!'); + + private final HttpServer server; + private final ConcurrentHashMap projects = new ConcurrentHashMap<>(); + private final int port; + + private static class Response { + private final int code; + private final String body; + + Response(int code, String body) { + this.code = code; + this.body = body; + } + + int code() { + return code; + } + + String body() { + return body; + } + } + + private enum Error { + ALREADY_EXISTS(409, "global", "alreadyExists", "ALREADY_EXISTS"), + PERMISSION_DENIED(403, "global", "forbidden", "PERMISSION_DENIED"), + FAILED_PRECONDITION(400, "global", "failedPrecondition", "FAILED_PRECONDITION"), + INVALID_ARGUMENT(400, "global", "badRequest", "INVALID_ARGUMENT"), + BAD_REQUEST(400, "global", "badRequest", "BAD_REQUEST"), + INTERNAL_ERROR(500, "global", "internalError", "INTERNAL_ERROR"); + + private final int code; + private final String domain; + private final String reason; + private final String status; + + Error(int code, String domain, String reason, String status) { + this.code = code; + this.domain = domain; + this.reason = reason; + this.status = status; + } + + Response response(String message) { + try { + return new Response(code, toJson(message)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error when generating JSON error response"); + } + } + + private String toJson(String message) throws IOException { + Map errors = new HashMap<>(); + errors.put("domain", domain); + errors.put("message", message); + errors.put("reason", reason); + Map args = new HashMap<>(); + args.put("errors", ImmutableList.of(errors)); + args.put("code", code); + args.put("message", message); + args.put("status", status); + return jsonFactory.toString(ImmutableMap.of("error", args)); + } + } + + private class RequestHandler implements HttpHandler { + @Override + public void handle(HttpExchange exchange) { + // see https://cloud.google.com/resource-manager/reference/rest/ + Response response; + String path = BASE_CONTEXT.relativize(exchange.getRequestURI()).getPath(); + String requestMethod = exchange.getRequestMethod(); + try { + switch (requestMethod) { + case "POST": + if (path.endsWith(":undelete")) { + response = undelete(projectIdFromUri(path)); + } else { + String requestBody = + decodeContent(exchange.getRequestHeaders(), exchange.getRequestBody()); + response = create(jsonFactory.fromString(requestBody, Project.class)); + } 
+ break; + case "DELETE": + response = delete(projectIdFromUri(path)); + break; + case "GET": + if (!path.isEmpty()) { + response = + get(projectIdFromUri(path), parseFields(exchange.getRequestURI().getQuery())); + } else { + response = list(parseListOptions(exchange.getRequestURI().getQuery())); + } + break; + case "PUT": + String requestBody = + decodeContent(exchange.getRequestHeaders(), exchange.getRequestBody()); + response = + replace(projectIdFromUri(path), jsonFactory.fromString(requestBody, Project.class)); + break; + default: + response = Error.BAD_REQUEST.response( + "The server could not understand the following request URI: " + requestMethod + " " + + path); + } + } catch (IOException e) { + response = Error.BAD_REQUEST.response(e.getMessage()); + } + writeResponse(exchange, response); + } + } + + private static void writeResponse(HttpExchange exchange, Response response) { + exchange.getResponseHeaders().set("Content-type", "application/json; charset=UTF-8"); + OutputStream outputStream = exchange.getResponseBody(); + try { + exchange.getResponseHeaders().add("Connection", "close"); + exchange.sendResponseHeaders(response.code(), response.body().length()); + outputStream.write(response.body().getBytes(StandardCharsets.UTF_8)); + outputStream.close(); + } catch (IOException e) { + log.log(Level.WARNING, "IOException encountered when sending response.", e); + } + } + + private static String decodeContent(Headers headers, InputStream inputStream) throws IOException { + List contentEncoding = headers.get("Content-encoding"); + InputStream input = inputStream; + try { + if (contentEncoding != null && !contentEncoding.isEmpty()) { + String encoding = contentEncoding.get(0); + if (SUPPORTED_COMPRESSION_ENCODINGS.contains(encoding)) { + input = new GZIPInputStream(inputStream); + } else if (!encoding.equals("identity")) { + throw new IOException( + "The request has the following unsupported HTTP content encoding: " + encoding); + } + } + return new String(ByteStreams.toByteArray(input), StandardCharsets.UTF_8); + } catch (IOException e) { + throw new IOException("Exception encountered when decoding request content.", e); + } + } + + private static String projectIdFromUri(String path) throws IOException { + if (path.isEmpty()) { + throw new IOException("The URI path '" + path + "' doesn't have a project ID."); + } + return path.split(":")[0]; + } + + private static String[] parseFields(String query) { + if (query != null && !query.isEmpty()) { + String[] querySplit = query.split("="); + return querySplit.length > 1 ? 
querySplit[1].split(",") : null; + } + return null; + } + + private static Map parseListOptions(String query) { + Map options = new HashMap<>(); + if (query != null) { + String[] args = query.split("&"); + for (String arg : args) { + String[] argEntry = arg.split("="); + switch (argEntry[0]) { + case "fields": + // List fields are in the form "projects(field1, field2, ...)" + options.put( + "fields", + argEntry[1].substring("projects(".length(), argEntry[1].length() - 1).split(",")); + break; + case "filter": + options.put("filter", argEntry[1].split(" ")); + break; + case "pageToken": + // support pageToken when Cloud Resource Manager supports this (#421) + break; + case "pageSize": + // support pageSize when Cloud Resource Manager supports this (#421) + break; + } + } + } + return options; + } + + private static String checkForProjectErrors(Project project) { + if (project.getProjectId() == null) { + return "Project ID cannot be empty."; + } + if (!isValidIdOrLabel(project.getProjectId(), 6, 30)) { + return "Project " + project.getProjectId() + " has an invalid ID." + + " See https://cloud.google.com/resource-manager/reference/rest/" + VERSION + "/projects" + + " for more information."; + } + if (project.getName() != null) { + for (char c : project.getName().toCharArray()) { + if (!PERMISSIBLE_PROJECT_NAME_PUNCTUATION.contains(c) && !Character.isLetterOrDigit(c)) { + return "Project " + project.getProjectId() + " has an invalid name." + + " See https://cloud.google.com/resource-manager/reference/rest/" + VERSION + + "/projects for more information."; + } + } + } + if (project.getLabels() != null) { + if (project.getLabels().size() > 256) { + return "Project " + project.getProjectId() + " exceeds the limit of 256 labels."; + } + for (Map.Entry entry : project.getLabels().entrySet()) { + if (!isValidIdOrLabel(entry.getKey(), 1, 63) + || !isValidIdOrLabel(entry.getValue(), 0, 63)) { + return "Project " + project.getProjectId() + " has an invalid label entry." 
+ + " See https://cloud.google.com/resource-manager/reference/rest/" + VERSION + + "/projects for more information."; + } + } + } + return null; + } + + private static boolean isValidIdOrLabel(String value, int minLength, int maxLength) { + for (char c : value.toCharArray()) { + if (c != '-' && !Character.isDigit(c) && !Character.isLowerCase(c)) { + return false; + } + } + if (!value.isEmpty() && (!Character.isLetter(value.charAt(0)) || value.endsWith("-"))) { + return false; + } + return value.length() >= minLength && value.length() <= maxLength; + } + + Response create(Project project) { + String customErrorMessage = checkForProjectErrors(project); + if (customErrorMessage != null) { + return Error.INVALID_ARGUMENT.response(customErrorMessage); + } else { + project.setLifecycleState("ACTIVE"); + project.setProjectNumber(Math.abs(PROJECT_NUMBER_GENERATOR.nextLong() % Long.MAX_VALUE)); + project.setCreateTime(ISODateTimeFormat.dateTime().print(System.currentTimeMillis())); + if (projects.putIfAbsent(project.getProjectId(), project) != null) { + return Error.ALREADY_EXISTS.response( + "A project with the same project ID (" + project.getProjectId() + ") already exists."); + } + try { + String createdProjectStr = jsonFactory.toString(project); + return new Response(HTTP_OK, createdProjectStr); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error serializing project " + project.getProjectId()); + } + } + } + + synchronized Response delete(String projectId) { + Project project = projects.get(projectId); + if (project == null) { + return Error.PERMISSION_DENIED.response( + "Error when deleting " + projectId + " because the project was not found."); + } + if (!project.getLifecycleState().equals("ACTIVE")) { + return Error.FAILED_PRECONDITION.response( + "Error when deleting " + projectId + " because the lifecycle state was not ACTIVE."); + } else { + project.setLifecycleState("DELETE_REQUESTED"); + return new Response(HTTP_OK, "{}"); + } + } + + Response get(String projectId, String[] fields) { + Project project = projects.get(projectId); + if (project != null) { + try { + return new Response(HTTP_OK, jsonFactory.toString(extractFields(project, fields))); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing project " + project.getProjectId()); + } + } else { + return Error.PERMISSION_DENIED.response("Project " + projectId + " not found."); + } + } + + Response list(Map options) { + // Use pageSize and pageToken options when Cloud Resource Manager does so (#421) + List projectsSerialized = new ArrayList<>(); + String[] filters = (String[]) options.get("filter"); + if (filters != null && !isValidFilter(filters)) { + return Error.INVALID_ARGUMENT.response("Could not parse the filter."); + } + String[] fields = (String[]) options.get("fields"); + for (Project p : projects.values()) { + boolean includeProject = includeProject(p, filters); + if (includeProject) { + try { + projectsSerialized.add(jsonFactory.toString(extractFields(p, fields))); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response( + "Error when serializing project " + p.getProjectId()); + } + } + } + StringBuilder responseBody = new StringBuilder(); + responseBody.append("{\"projects\": ["); + Joiner.on(",").appendTo(responseBody, projectsSerialized); + responseBody.append("]}"); + return new Response(HTTP_OK, responseBody.toString()); + } + + private static boolean isValidFilter(String[] filters) { + for (String filter : filters) { + String field = 
filter.toLowerCase().split(":")[0]; + if (!("id".equals(field) || "name".equals(field) || field.startsWith("labels."))) { + return false; + } + } + return true; + } + + private static boolean includeProject(Project project, String[] filters) { + if (filters == null) { + return true; + } + for (String filter : filters) { + String[] filterEntry = filter.toLowerCase().split(":"); + String filterType = filterEntry[0]; + if ("id".equals(filterType)) { + if (!satisfiesFilter(project.getProjectId(), filterEntry[1])) { + return false; + } + } else if ("name".equals(filterType)) { + if (!satisfiesFilter(project.getName(), filterEntry[1])) { + return false; + } + } else if (filterType.startsWith("labels.")) { + String labelKey = filterType.substring("labels.".length()); + if (project.getLabels() != null) { + String labelValue = project.getLabels().get(labelKey); + if (!satisfiesFilter(labelValue, filterEntry[1])) { + return false; + } + } + } + } + return true; + } + + private static boolean satisfiesFilter(String projectValue, String filterValue) { + if (projectValue == null) { + return false; + } + return "*".equals(filterValue) || filterValue.equals(projectValue.toLowerCase()); + } + + private static Project extractFields(Project fullProject, String[] fields) { + if (fields == null) { + return fullProject; + } + Project project = new Project(); + for (String field : fields) { + switch (field) { + case "createTime": + project.setCreateTime(fullProject.getCreateTime()); + break; + case "labels": + project.setLabels(fullProject.getLabels()); + break; + case "lifecycleState": + project.setLifecycleState(fullProject.getLifecycleState()); + break; + case "name": + project.setName(fullProject.getName()); + break; + case "parent": + project.setParent(fullProject.getParent()); + break; + case "projectId": + project.setProjectId(fullProject.getProjectId()); + break; + case "projectNumber": + project.setProjectNumber(fullProject.getProjectNumber()); + break; + } + } + return project; + } + + synchronized Response replace(String projectId, Project project) { + Project originalProject = projects.get(projectId); + if (originalProject == null) { + return Error.PERMISSION_DENIED.response( + "Error when replacing " + projectId + " because the project was not found."); + } else if (!originalProject.getLifecycleState().equals("ACTIVE")) { + return Error.FAILED_PRECONDITION.response( + "Error when replacing " + projectId + " because the lifecycle state was not ACTIVE."); + } else if (!Objects.equal(originalProject.getParent(), project.getParent())) { + return Error.INVALID_ARGUMENT.response( + "The server currently only supports setting the parent once " + + "and does not allow unsetting it."); + } + project.setProjectId(projectId); + project.setLifecycleState(originalProject.getLifecycleState()); + project.setCreateTime(originalProject.getCreateTime()); + project.setProjectNumber(originalProject.getProjectNumber()); + // replace cannot fail because both this method and removeProject are synchronized + projects.replace(projectId, project); + try { + return new Response(HTTP_OK, jsonFactory.toString(project)); + } catch (IOException e) { + return Error.INTERNAL_ERROR.response("Error when serializing project " + projectId); + } + } + + synchronized Response undelete(String projectId) { + Project project = projects.get(projectId); + Response response; + if (project == null) { + response = Error.PERMISSION_DENIED.response( + "Error when undeleting " + projectId + " because the project was not found."); + } else if 
(!project.getLifecycleState().equals("DELETE_REQUESTED")) { + response = Error.FAILED_PRECONDITION.response("Error when undeleting " + projectId + + " because the lifecycle state was not DELETE_REQUESTED."); + } else { + project.setLifecycleState("ACTIVE"); + response = new Response(HTTP_OK, "{}"); + } + return response; + } + + private LocalResourceManagerHelper() { + try { + server = HttpServer.create(new InetSocketAddress(0), 0); + port = server.getAddress().getPort(); + server.createContext(CONTEXT, new RequestHandler()); + } catch (IOException e) { + throw new RuntimeException("Could not bind the mock Resource Manager server.", e); + } + } + + /** + * Creates a LocalResourceManagerHelper object that listens to requests on the local machine. + */ + public static LocalResourceManagerHelper create() { + return new LocalResourceManagerHelper(); + } + + /** + * Returns a ResourceManagerOptions instance that sets the host to use the mock server. + */ + public ResourceManagerOptions options() { + return ResourceManagerOptions.builder().host("http://localhost:" + port).build(); + } + + /** + * Starts the thread that runs the Resource Manager server. + */ + public void start() { + server.start(); + } + + /** + * Stops the thread that runs the mock Resource Manager server. + */ + public void stop() { + server.stop(1); + } + + /** + * Utility method to change the lifecycle state of the specified project. + * + * @return true if the lifecycle state was successfully updated, false otherwise + */ + public synchronized boolean changeLifecycleState(String projectId, String lifecycleState) { + checkArgument( + "ACTIVE".equals(lifecycleState) || "DELETE_REQUESTED".equals(lifecycleState) + || "DELETE_IN_PROGRESS".equals(lifecycleState), + "Lifecycle state must be ACTIVE, DELETE_REQUESTED, or DELETE_IN_PROGRESS"); + Project project = projects.get(checkNotNull(projectId)); + if (project != null) { + project.setLifecycleState(lifecycleState); + return true; + } + return false; + } + + /** + * Utility method to remove the specified project. + * + *
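Because delete() only moves a project to DELETE_REQUESTED, tests can combine changeLifecycleState above with the removeProject method introduced here to simulate the server finishing the job; a hedged sketch with an illustrative project ID:

```java
import com.google.gcloud.resourcemanager.ProjectInfo;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper;

public class LifecycleSimulationSketch {
  public static void main(String... args) {
    LocalResourceManagerHelper helper = LocalResourceManagerHelper.create();
    helper.start();
    ResourceManager resourceManager = helper.options().service();
    resourceManager.create(ProjectInfo.builder("doomed-project").build());
    resourceManager.delete("doomed-project"); // state becomes DELETE_REQUESTED
    // Simulate the server advancing and then completing the deletion.
    helper.changeLifecycleState("doomed-project", "DELETE_IN_PROGRESS");
    helper.removeProject("doomed-project"); // mimics full server-side removal
    helper.stop();
  }
}
```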

      This method can be used to fully remove a project (to mimic when the server completely + * deletes a project). + * + * @return true if the project was successfully deleted, false if the project didn't exist + */ + public synchronized boolean removeProject(String projectId) { + // Because this method is synchronized, any code that relies on non-atomic read/write operations + // should not fail if that code is also synchronized. + return projects.remove(checkNotNull(projectId)) != null; + } +} diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java new file mode 100644 index 000000000000..7e5519f7d085 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/testing/package-info.java @@ -0,0 +1,32 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A testing helper for Google Cloud Resource Manager. + * + *

      A simple usage example: + * Before the test: + *

+ * <pre> {@code
      + * LocalResourceManagerHelper resourceManagerHelper = LocalResourceManagerHelper.create();
      + * ResourceManager resourceManager = resourceManagerHelper.options().service();
+ * } </pre>
      + * + *

      After the test: + *

+ * <pre> {@code
      + * resourceManagerHelper.stop();
+ * } </pre>
      + */ +package com.google.gcloud.resourcemanager.testing; diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java new file mode 100644 index 000000000000..61c622fa0c33 --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java @@ -0,0 +1,112 @@ +package com.google.gcloud.spi; + +import static com.google.gcloud.spi.ResourceManagerRpc.Option.FIELDS; +import static com.google.gcloud.spi.ResourceManagerRpc.Option.FILTER; +import static com.google.gcloud.spi.ResourceManagerRpc.Option.PAGE_SIZE; +import static com.google.gcloud.spi.ResourceManagerRpc.Option.PAGE_TOKEN; +import static java.net.HttpURLConnection.HTTP_FORBIDDEN; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; + +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpTransport; +import com.google.api.client.json.jackson.JacksonFactory; +import com.google.api.services.cloudresourcemanager.Cloudresourcemanager; +import com.google.api.services.cloudresourcemanager.model.ListProjectsResponse; +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.gcloud.resourcemanager.ResourceManagerException; +import com.google.gcloud.resourcemanager.ResourceManagerOptions; + +import java.io.IOException; +import java.util.Map; + +public class DefaultResourceManagerRpc implements ResourceManagerRpc { + + private final Cloudresourcemanager resourceManager; + + public DefaultResourceManagerRpc(ResourceManagerOptions options) { + HttpTransport transport = options.httpTransportFactory().create(); + HttpRequestInitializer initializer = options.httpRequestInitializer(); + resourceManager = + new Cloudresourcemanager.Builder(transport, new JacksonFactory(), initializer) + .setRootUrl(options.host()) + .setApplicationName(options.applicationName()) + .build(); + } + + private static ResourceManagerException translate(IOException exception) { + return new ResourceManagerException(exception); + } + + @Override + public Project create(Project project) throws ResourceManagerException { + try { + return resourceManager.projects().create(project).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public void delete(String projectId) throws ResourceManagerException { + try { + resourceManager.projects().delete(projectId).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Project get(String projectId, Map options) throws ResourceManagerException { + try { + return resourceManager.projects() + .get(projectId) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + ResourceManagerException translated = translate(ex); + if (translated.code() == HTTP_FORBIDDEN || translated.code() == HTTP_NOT_FOUND) { + // Service can return either 403 or 404 to signify that the project doesn't exist. 
+ return null; + } else { + throw translated; + } + } + } + + @Override + public Tuple> list(Map options) + throws ResourceManagerException { + try { + ListProjectsResponse response = resourceManager.projects() + .list() + .setFields(FIELDS.getString(options)) + .setFilter(FILTER.getString(options)) + .setPageSize(PAGE_SIZE.getInt(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + .execute(); + return Tuple.>of( + response.getNextPageToken(), response.getProjects()); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public void undelete(String projectId) throws ResourceManagerException { + try { + resourceManager.projects().undelete(projectId).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Project replace(Project project) throws ResourceManagerException { + try { + return resourceManager.projects().update(project.getProjectId(), project).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } +} + diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java new file mode 100644 index 000000000000..52dfc2d2368e --- /dev/null +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.spi; + +import com.google.api.services.cloudresourcemanager.model.Project; +import com.google.gcloud.resourcemanager.ResourceManagerException; + +import java.util.Map; + +public interface ResourceManagerRpc { + + enum Option { + FILTER("filter"), + FIELDS("fields"), + PAGE_SIZE("pageSize"), + PAGE_TOKEN("pageToken"); + + private final String value; + + Option(String value) { + this.value = value; + } + + public String value() { + return value; + } + + @SuppressWarnings("unchecked") + T get(Map options) { + return (T) options.get(this); + } + + String getString(Map options) { + return get(options); + } + + Integer getInt(Map options) { + return get(options); + } + } + + class Tuple { + private final X x; + private final Y y; + + private Tuple(X x, Y y) { + this.x = x; + this.y = y; + } + + public static Tuple of(X x, Y y) { + return new Tuple<>(x, y); + } + + public X x() { + return x; + } + + public Y y() { + return y; + } + } + + Project create(Project project) throws ResourceManagerException; + + void delete(String projectId) throws ResourceManagerException; + + Project get(String projectId, Map options) throws ResourceManagerException; + + Tuple> list(Map options) throws ResourceManagerException; + + void undelete(String projectId) throws ResourceManagerException; + + Project replace(Project project) throws ResourceManagerException; + + // TODO(ajaykannan): implement "Organization" functionality when available (issue #319) +} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpcFactory.java similarity index 62% rename from gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java rename to gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpcFactory.java index 62b1f442310c..c2c607c0c205 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/ListResult.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpcFactory.java @@ -14,21 +14,14 @@ * limitations under the License. */ -package com.google.gcloud.storage; +package com.google.gcloud.spi; + +import com.google.gcloud.resourcemanager.ResourceManagerOptions; /** - * Interface for Google Cloud storage list result. + * An interface for Resource Manager RPC factory. + * Implementation will be loaded via {@link java.util.ServiceLoader}. */ -public interface ListResult extends Iterable { - - /** - * Returns the cursor for the nextPage or {@code null} if no more results. - */ - String nextPageCursor(); - - /** - * Returns the results of the nextPage or {@code null} if no more result. 
- */ - ListResult nextPage(); - +public interface ResourceManagerRpcFactory + extends ServiceRpcFactory { } diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java new file mode 100644 index 000000000000..7eb0156d4e56 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/LocalResourceManagerHelperTest.java @@ -0,0 +1,539 @@ +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; +import com.google.gcloud.spi.DefaultResourceManagerRpc; +import com.google.gcloud.spi.ResourceManagerRpc; +import com.google.gcloud.spi.ResourceManagerRpc.Tuple; + +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.HashMap; +import java.util.Map; + +public class LocalResourceManagerHelperTest { + + private static final String DEFAULT_PARENT_ID = "12345"; + private static final String DEFAULT_PARENT_TYPE = "organization"; + private static final com.google.api.services.cloudresourcemanager.model.ResourceId PARENT = + new com.google.api.services.cloudresourcemanager.model.ResourceId() + .setId(DEFAULT_PARENT_ID) + .setType(DEFAULT_PARENT_TYPE); + private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); + private static final LocalResourceManagerHelper RESOURCE_MANAGER_HELPER = + LocalResourceManagerHelper.create(); + private static final ResourceManagerRpc rpc = + new DefaultResourceManagerRpc(RESOURCE_MANAGER_HELPER.options()); + private static final com.google.api.services.cloudresourcemanager.model.Project PARTIAL_PROJECT = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "partial-project"); + private static final com.google.api.services.cloudresourcemanager.model.Project COMPLETE_PROJECT = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("complete-project") + .setName("full project") + .setLabels(ImmutableMap.of("k1", "v1", "k2", "v2")); + private static final com.google.api.services.cloudresourcemanager.model.Project + PROJECT_WITH_PARENT = + copyFrom(COMPLETE_PROJECT).setProjectId("project-with-parent-id").setParent(PARENT); + + @BeforeClass + public static void beforeClass() { + RESOURCE_MANAGER_HELPER.start(); + } + + private static com.google.api.services.cloudresourcemanager.model.Project copyFrom( + com.google.api.services.cloudresourcemanager.model.Project from) { + return new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId(from.getProjectId()) + .setName(from.getName()) + .setLabels(from.getLabels() != null ? ImmutableMap.copyOf(from.getLabels()) : null) + .setProjectNumber(from.getProjectNumber()) + .setCreateTime(from.getCreateTime()) + .setLifecycleState(from.getLifecycleState()) + .setParent(from.getParent() != null ? 
from.getParent().clone() : null); + } + + private void clearProjects() { + for (com.google.api.services.cloudresourcemanager.model.Project project : + rpc.list(EMPTY_RPC_OPTIONS).y()) { + RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); + } + } + + @Before + public void setUp() { + clearProjects(); + } + + @AfterClass + public static void afterClass() { + RESOURCE_MANAGER_HELPER.stop(); + } + + @Test + public void testCreate() { + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.create(PARTIAL_PROJECT); + compareReadWriteFields(PARTIAL_PROJECT, returnedProject); + assertEquals("ACTIVE", returnedProject.getLifecycleState()); + assertNull(returnedProject.getLabels()); + assertNull(returnedProject.getName()); + assertNull(returnedProject.getParent()); + assertNotNull(returnedProject.getProjectNumber()); + assertNotNull(returnedProject.getCreateTime()); + try { + rpc.create(PARTIAL_PROJECT); + fail("Should fail, project already exists."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().startsWith("A project with the same project ID") + && e.getMessage().endsWith("already exists.")); + } + returnedProject = rpc.create(PROJECT_WITH_PARENT); + compareReadWriteFields(PROJECT_WITH_PARENT, returnedProject); + assertEquals("ACTIVE", returnedProject.getLifecycleState()); + assertNotNull(returnedProject.getProjectNumber()); + assertNotNull(returnedProject.getCreateTime()); + } + + @Test + public void testIsInvalidProjectId() { + com.google.api.services.cloudresourcemanager.model.Project project = + new com.google.api.services.cloudresourcemanager.model.Project(); + String invalidIDMessageSubstring = "invalid ID"; + expectInvalidArgumentException(project, "Project ID cannot be empty."); + project.setProjectId("abcde"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("this-project-id-is-more-than-thirty-characters-long"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("project-id-with-invalid-character-?"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("-invalid-start-character"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("invalid-ending-character-"); + expectInvalidArgumentException(project, invalidIDMessageSubstring); + project.setProjectId("some-valid-project-id-12345"); + rpc.create(project); + assertNotNull(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS)); + } + + private void expectInvalidArgumentException( + com.google.api.services.cloudresourcemanager.model.Project project, + String errorMessageSubstring) { + try { + rpc.create(project); + fail("Should fail because of an invalid argument."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains(errorMessageSubstring)); + } + } + + @Test + public void testIsInvalidProjectName() { + com.google.api.services.cloudresourcemanager.model.Project project = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "some-project-id"); + rpc.create(project); + assertNull(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS).getName()); + RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); + project.setName("This is a valid name-'\"!"); + rpc.create(project); + assertEquals(project.getName(), rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS).getName()); + 
RESOURCE_MANAGER_HELPER.removeProject(project.getProjectId()); + project.setName("invalid-character-,"); + try { + rpc.create(project); + fail("Should fail because of invalid project name."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("invalid name")); + } + } + + @Test + public void testIsInvalidProjectLabels() { + com.google.api.services.cloudresourcemanager.model.Project project = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "some-valid-project-id"); + String invalidLabelMessageSubstring = "invalid label entry"; + project.setLabels(ImmutableMap.of("", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of( + "this-project-label-is-more-than-sixty-three-characters-long-so-it-should-fail", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of( + "k1", "this-project-label-is-more-than-sixty-three-characters-long-so-it-should-fail")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1?", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1", "v1*")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("-k1", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1", "-v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1-", "v1")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + project.setLabels(ImmutableMap.of("k1", "v1-")); + expectInvalidArgumentException(project, invalidLabelMessageSubstring); + Map tooManyLabels = new HashMap<>(); + for (int i = 0; i < 257; i++) { + tooManyLabels.put("k" + Integer.toString(i), "v" + Integer.toString(i)); + } + project.setLabels(tooManyLabels); + expectInvalidArgumentException(project, "exceeds the limit of 256 labels"); + project.setLabels(ImmutableMap.of("k-1", "")); + rpc.create(project); + assertNotNull(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS)); + assertTrue(rpc.get(project.getProjectId(), EMPTY_RPC_OPTIONS) + .getLabels() + .get("k-1") + .isEmpty()); + } + + @Test + public void testDelete() { + rpc.create(COMPLETE_PROJECT); + rpc.delete(COMPLETE_PROJECT.getProjectId()); + assertEquals( + "DELETE_REQUESTED", + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS).getLifecycleState()); + try { + rpc.delete("some-nonexistant-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("not found.")); + } + } + + @Test + public void testDeleteWhenDeleteInProgress() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS"); + try { + rpc.delete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testDeleteWhenDeleteRequested() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), 
"DELETE_REQUESTED"); + try { + rpc.delete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testGet() { + rpc.create(COMPLETE_PROJECT); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); + compareReadWriteFields(COMPLETE_PROJECT, returnedProject); + RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId()); + assertNull(rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS)); + } + + @Test + public void testGetWithOptions() { + com.google.api.services.cloudresourcemanager.model.Project originalProject = + rpc.create(COMPLETE_PROJECT); + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, "projectId,name,createTime"); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.get(COMPLETE_PROJECT.getProjectId(), rpcOptions); + assertFalse(COMPLETE_PROJECT.equals(returnedProject)); + assertEquals(COMPLETE_PROJECT.getProjectId(), returnedProject.getProjectId()); + assertEquals(COMPLETE_PROJECT.getName(), returnedProject.getName()); + assertEquals(originalProject.getCreateTime(), returnedProject.getCreateTime()); + assertNull(returnedProject.getParent()); + assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getLabels()); + } + + @Test + public void testList() { + Tuple> projects = + rpc.list(EMPTY_RPC_OPTIONS); + assertNull(projects.x()); // change this when #421 is resolved + assertFalse(projects.y().iterator().hasNext()); + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_REQUESTED"); + rpc.create(PROJECT_WITH_PARENT); + projects = rpc.list(EMPTY_RPC_OPTIONS); + for (com.google.api.services.cloudresourcemanager.model.Project p : projects.y()) { + if (p.getProjectId().equals(COMPLETE_PROJECT.getProjectId())) { + compareReadWriteFields(COMPLETE_PROJECT, p); + } else if (p.getProjectId().equals(PROJECT_WITH_PARENT.getProjectId())) { + compareReadWriteFields(PROJECT_WITH_PARENT, p); + } else { + fail("Unexpected project in list."); + } + } + } + + @Test + public void testListFieldOptions() { + Map rpcOptions = new HashMap<>(); + rpcOptions.put(ResourceManagerRpc.Option.FIELDS, "projects(projectId,name,labels)"); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_TOKEN, "somePageToken"); + rpcOptions.put(ResourceManagerRpc.Option.PAGE_SIZE, 1); + rpc.create(PROJECT_WITH_PARENT); + Tuple> projects = + rpc.list(rpcOptions); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + projects.y().iterator().next(); + assertFalse(PROJECT_WITH_PARENT.equals(returnedProject)); + assertEquals(PROJECT_WITH_PARENT.getProjectId(), returnedProject.getProjectId()); + assertEquals(PROJECT_WITH_PARENT.getName(), returnedProject.getName()); + assertEquals(PROJECT_WITH_PARENT.getLabels(), returnedProject.getLabels()); + assertNull(returnedProject.getParent()); + assertNull(returnedProject.getProjectNumber()); + assertNull(returnedProject.getLifecycleState()); + assertNull(returnedProject.getCreateTime()); + } + + @Test + public void testListFilterOptions() { + Map rpcFilterOptions = new HashMap<>(); + rpcFilterOptions.put( + ResourceManagerRpc.Option.FILTER, "id:* 
name:myProject labels.color:blue LABELS.SIZE:*"); + com.google.api.services.cloudresourcemanager.model.Project matchingProject = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("matching-project") + .setName("MyProject") + .setLabels(ImmutableMap.of("color", "blue", "size", "big")); + com.google.api.services.cloudresourcemanager.model.Project nonMatchingProject1 = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("non-matching-project1") + .setName("myProject"); + nonMatchingProject1.setLabels(ImmutableMap.of("color", "blue")); + com.google.api.services.cloudresourcemanager.model.Project nonMatchingProject2 = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId("non-matching-project2") + .setName("myProj") + .setLabels(ImmutableMap.of("color", "blue", "size", "big")); + com.google.api.services.cloudresourcemanager.model.Project nonMatchingProject3 = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + "non-matching-project3"); + rpc.create(matchingProject); + rpc.create(nonMatchingProject1); + rpc.create(nonMatchingProject2); + rpc.create(nonMatchingProject3); + for (com.google.api.services.cloudresourcemanager.model.Project p : + rpc.list(rpcFilterOptions).y()) { + assertFalse(p.equals(nonMatchingProject1)); + assertFalse(p.equals(nonMatchingProject2)); + compareReadWriteFields(matchingProject, p); + } + } + + @Test + public void testReplace() { + com.google.api.services.cloudresourcemanager.model.Project createdProject = + rpc.create(COMPLETE_PROJECT); + String newName = "new name"; + Map<String, String> newLabels = ImmutableMap.of("new k1", "new v1"); + com.google.api.services.cloudresourcemanager.model.Project anotherCompleteProject = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId(COMPLETE_PROJECT.getProjectId()) + .setName(newName) + .setLabels(newLabels) + .setProjectNumber(987654321L) + .setCreateTime("2000-01-01T00:00:00.001Z") + .setLifecycleState("DELETE_REQUESTED"); + com.google.api.services.cloudresourcemanager.model.Project returnedProject = + rpc.replace(anotherCompleteProject); + compareReadWriteFields(anotherCompleteProject, returnedProject); + assertEquals(createdProject.getProjectNumber(), returnedProject.getProjectNumber()); + assertEquals(createdProject.getCreateTime(), returnedProject.getCreateTime()); + assertEquals(createdProject.getLifecycleState(), returnedProject.getLifecycleState()); + com.google.api.services.cloudresourcemanager.model.Project nonexistentProject = + new com.google.api.services.cloudresourcemanager.model.Project(); + nonexistentProject.setProjectId("some-project-id-that-does-not-exist"); + try { + rpc.replace(nonexistentProject); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testReplaceWhenDeleteRequested() { + rpc.create(COMPLETE_PROJECT); + rpc.delete(COMPLETE_PROJECT.getProjectId()); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + COMPLETE_PROJECT.getProjectId()); + try { + rpc.replace(anotherProject); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + 
} + } + + @Test + public void testReplaceWhenDeleteInProgress() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS"); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + COMPLETE_PROJECT.getProjectId()); + try { + rpc.replace(anotherProject); + fail("Should fail because the project is not ACTIVE."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("the lifecycle state was not ACTIVE")); + } + } + + @Test + public void testReplaceAddingParent() { + rpc.create(COMPLETE_PROJECT); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project() + .setProjectId(COMPLETE_PROJECT.getProjectId()) + .setParent(PARENT); + try { + rpc.replace(anotherProject); + fail("Should fail because the project's parent was modified after creation."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertEquals( + "The server currently only supports setting the parent once " + + "and does not allow unsetting it.", + e.getMessage()); + } + } + + @Test + public void testReplaceRemovingParent() { + rpc.create(PROJECT_WITH_PARENT); + com.google.api.services.cloudresourcemanager.model.Project anotherProject = + new com.google.api.services.cloudresourcemanager.model.Project().setProjectId( + PROJECT_WITH_PARENT.getProjectId()); + try { + rpc.replace(anotherProject); + fail("Should fail because the project's parent was unset."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertEquals( + "The server currently only supports setting the parent once " + + "and does not allow unsetting it.", + e.getMessage()); + } + } + + @Test + public void testUndelete() { + rpc.create(COMPLETE_PROJECT); + rpc.delete(COMPLETE_PROJECT.getProjectId()); + assertEquals( + "DELETE_REQUESTED", + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS).getLifecycleState()); + rpc.undelete(COMPLETE_PROJECT.getProjectId()); + com.google.api.services.cloudresourcemanager.model.Project revivedProject = + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS); + compareReadWriteFields(COMPLETE_PROJECT, revivedProject); + assertEquals("ACTIVE", revivedProject.getLifecycleState()); + try { + rpc.undelete("invalid-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testUndeleteWhenActive() { + rpc.create(COMPLETE_PROJECT); + try { + rpc.undelete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is not deleted."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("lifecycle state was not DELETE_REQUESTED")); + } + } + + @Test + public void testUndeleteWhenDeleteInProgress() { + rpc.create(COMPLETE_PROJECT); + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS"); + try { + rpc.undelete(COMPLETE_PROJECT.getProjectId()); + fail("Should fail because the project is in the process of being deleted."); + } catch (ResourceManagerException e) { + assertEquals(400, e.code()); + assertTrue(e.getMessage().contains("lifecycle state was not 
DELETE_REQUESTED")); + } + } + + @Test + public void testChangeLifecycleStatus() { + assertFalse(RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS")); + rpc.create(COMPLETE_PROJECT); + assertTrue(RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "DELETE_IN_PROGRESS")); + assertEquals( + "DELETE_IN_PROGRESS", + rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS).getLifecycleState()); + try { + RESOURCE_MANAGER_HELPER.changeLifecycleState( + COMPLETE_PROJECT.getProjectId(), "INVALID_STATE"); + fail("Should fail because of an invalid lifecycle state"); + } catch (IllegalArgumentException e) { + // ignore + } + } + + @Test + public void testRemoveProject() { + assertFalse(RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId())); + rpc.create(COMPLETE_PROJECT); + assertTrue(RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.getProjectId())); + assertNull(rpc.get(COMPLETE_PROJECT.getProjectId(), EMPTY_RPC_OPTIONS)); + } + + private void compareReadWriteFields( + com.google.api.services.cloudresourcemanager.model.Project expected, + com.google.api.services.cloudresourcemanager.model.Project actual) { + assertEquals(expected.getProjectId(), actual.getProjectId()); + assertEquals(expected.getName(), actual.getName()); + assertEquals(expected.getLabels(), actual.getLabels()); + assertEquals(expected.getParent(), actual.getParent()); + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java new file mode 100644 index 000000000000..3aaef8047322 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectInfoTest.java @@ -0,0 +1,109 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableMap; + +import org.junit.Test; + +import java.util.Map; + +public class ProjectInfoTest { + + private static final String PROJECT_ID = "project-id"; + private static final String NAME = "myProj"; + private static final Map<String, String> LABELS = ImmutableMap.of("k1", "v1", "k2", "v2"); + private static final Long PROJECT_NUMBER = 123L; + private static final Long CREATE_TIME_MILLIS = 123456789L; + private static final ProjectInfo.State STATE = ProjectInfo.State.DELETE_REQUESTED; + private static final ProjectInfo.ResourceId PARENT = + new ProjectInfo.ResourceId("id", "organization"); + private static final ProjectInfo FULL_PROJECT_INFO = ProjectInfo.builder(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build(); + private static final ProjectInfo PARTIAL_PROJECT_INFO = ProjectInfo.builder(PROJECT_ID).build(); + private static final ProjectInfo UNNAMED_PROJECT_FROM_LIST = + PARTIAL_PROJECT_INFO.toBuilder().name("Unnamed").build(); + + @Test + public void testBuilder() { + assertEquals(PROJECT_ID, FULL_PROJECT_INFO.projectId()); + assertEquals(NAME, FULL_PROJECT_INFO.name()); + assertEquals(LABELS, FULL_PROJECT_INFO.labels()); + assertEquals(PROJECT_NUMBER, FULL_PROJECT_INFO.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, FULL_PROJECT_INFO.createTimeMillis()); + assertEquals(STATE, FULL_PROJECT_INFO.state()); + + assertEquals(PROJECT_ID, PARTIAL_PROJECT_INFO.projectId()); + assertEquals(null, PARTIAL_PROJECT_INFO.name()); + assertTrue(PARTIAL_PROJECT_INFO.labels().isEmpty()); + assertEquals(null, PARTIAL_PROJECT_INFO.projectNumber()); + assertEquals(null, PARTIAL_PROJECT_INFO.createTimeMillis()); + assertEquals(null, PARTIAL_PROJECT_INFO.state()); + } + + @Test + public void testToBuilder() { + compareProjects(FULL_PROJECT_INFO, FULL_PROJECT_INFO.toBuilder().build()); + compareProjects(PARTIAL_PROJECT_INFO, PARTIAL_PROJECT_INFO.toBuilder().build()); + } + + @Test + public void testToAndFromPb() { + assertTrue(FULL_PROJECT_INFO.toPb().getCreateTime().endsWith("Z")); + compareProjects(FULL_PROJECT_INFO, ProjectInfo.fromPb(FULL_PROJECT_INFO.toPb())); + compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.fromPb(PARTIAL_PROJECT_INFO.toPb())); + compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.fromPb(UNNAMED_PROJECT_FROM_LIST.toPb())); + } + + @Test + public void testEquals() { + compareProjects( + FULL_PROJECT_INFO, + ProjectInfo.builder(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .parent(PARENT) + .build()); + compareProjects(PARTIAL_PROJECT_INFO, ProjectInfo.builder(PROJECT_ID).build()); + assertNotEquals(FULL_PROJECT_INFO, PARTIAL_PROJECT_INFO); + } + + private void compareProjects(ProjectInfo expected, ProjectInfo value) { + assertEquals(expected, value); + assertEquals(expected.projectId(), value.projectId()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.labels(), value.labels()); + assertEquals(expected.projectNumber(), value.projectNumber()); + assertEquals(expected.createTimeMillis(), value.createTimeMillis()); + assertEquals(expected.state(), value.state()); + assertEquals(expected.parent(), value.parent()); + } +} + diff --git 
a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java new file mode 100644 index 000000000000..a741963913c6 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ProjectTest.java @@ -0,0 +1,199 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.createStrictMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; + +import com.google.common.collect.ImmutableMap; + +import org.junit.After; +import org.junit.Test; + +import java.util.Map; + +public class ProjectTest { + private static final String PROJECT_ID = "project-id"; + private static final String NAME = "myProj"; + private static final Map<String, String> LABELS = ImmutableMap.of("k1", "v1", "k2", "v2"); + private static final Long PROJECT_NUMBER = 123L; + private static final Long CREATE_TIME_MILLIS = 123456789L; + private static final ProjectInfo.State STATE = ProjectInfo.State.DELETE_REQUESTED; + private static final ProjectInfo PROJECT_INFO = ProjectInfo.builder(PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .build(); + + private ResourceManager serviceMockReturnsOptions = createStrictMock(ResourceManager.class); + private ResourceManagerOptions mockOptions = createMock(ResourceManagerOptions.class); + private ResourceManager resourceManager; + private Project expectedProject; + private Project project; + + @After + public void tearDown() throws Exception { + verify(resourceManager); + } + + private void initializeExpectedProject(int optionsCalls) { + expect(serviceMockReturnsOptions.options()).andReturn(mockOptions).times(optionsCalls); + replay(serviceMockReturnsOptions); + resourceManager = createStrictMock(ResourceManager.class); + expectedProject = + new Project(serviceMockReturnsOptions, new ProjectInfo.BuilderImpl(PROJECT_INFO)); + } + + private void initializeProject() { + project = new Project(resourceManager, new ProjectInfo.BuilderImpl(PROJECT_INFO)); + } + + @Test + public void testBuilder() { + initializeExpectedProject(2); + replay(resourceManager); + Project builtProject = Project.builder(serviceMockReturnsOptions, PROJECT_ID) + .name(NAME) + .labels(LABELS) + .projectNumber(PROJECT_NUMBER) + .createTimeMillis(CREATE_TIME_MILLIS) + .state(STATE) + .build(); + assertEquals(PROJECT_ID, builtProject.projectId()); + assertEquals(NAME, builtProject.name()); + assertEquals(LABELS, builtProject.labels()); + 
assertEquals(PROJECT_NUMBER, builtProject.projectNumber()); + assertEquals(CREATE_TIME_MILLIS, builtProject.createTimeMillis()); + assertEquals(STATE, builtProject.state()); + assertSame(serviceMockReturnsOptions, builtProject.resourceManager()); + } + + @Test + public void testToBuilder() { + initializeExpectedProject(4); + replay(resourceManager); + compareProjects(expectedProject, expectedProject.toBuilder().build()); + } + + @Test + public void testGet() { + initializeExpectedProject(1); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(expectedProject); + replay(resourceManager); + Project loadedProject = Project.get(resourceManager, PROJECT_INFO.projectId()); + assertEquals(expectedProject, loadedProject); + } + + @Test + public void testReload() { + initializeExpectedProject(2); + ProjectInfo newInfo = PROJECT_INFO.toBuilder().addLabel("k3", "v3").build(); + Project expectedProject = + new Project(serviceMockReturnsOptions, new ProjectInfo.BuilderImpl(newInfo)); + expect(resourceManager.options()).andReturn(mockOptions); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(expectedProject); + replay(resourceManager); + initializeProject(); + Project newProject = project.reload(); + assertEquals(expectedProject, newProject); + } + + @Test + public void testLoadNull() { + initializeExpectedProject(1); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(null); + replay(resourceManager); + assertNull(Project.get(resourceManager, PROJECT_INFO.projectId())); + } + + @Test + public void testReloadNull() { + initializeExpectedProject(1); + expect(resourceManager.options()).andReturn(mockOptions); + expect(resourceManager.get(PROJECT_INFO.projectId())).andReturn(null); + replay(resourceManager); + Project reloadedProject = + new Project(resourceManager, new ProjectInfo.BuilderImpl(PROJECT_INFO)).reload(); + assertNull(reloadedProject); + } + + @Test + public void testResourceManager() { + initializeExpectedProject(1); + replay(resourceManager); + assertEquals(serviceMockReturnsOptions, expectedProject.resourceManager()); + } + + @Test + public void testDelete() { + initializeExpectedProject(1); + expect(resourceManager.options()).andReturn(mockOptions); + resourceManager.delete(PROJECT_INFO.projectId()); + replay(resourceManager); + initializeProject(); + project.delete(); + } + + @Test + public void testUndelete() { + initializeExpectedProject(1); + expect(resourceManager.options()).andReturn(mockOptions); + resourceManager.undelete(PROJECT_INFO.projectId()); + replay(resourceManager); + initializeProject(); + project.undelete(); + } + + @Test + public void testReplace() { + initializeExpectedProject(2); + Project expectedReplacedProject = expectedProject.toBuilder().addLabel("k3", "v3").build(); + expect(resourceManager.options()).andReturn(mockOptions).times(2); + expect(resourceManager.replace(anyObject(Project.class))).andReturn(expectedReplacedProject); + replay(resourceManager); + initializeProject(); + Project newProject = + new Project(resourceManager, new ProjectInfo.BuilderImpl(expectedReplacedProject)); + Project actualReplacedProject = newProject.replace(); + compareProjectInfos(expectedReplacedProject, actualReplacedProject); + } + + private void compareProjects(Project expected, Project value) { + assertEquals(expected, value); + compareProjectInfos(expected, value); + assertEquals(expected.resourceManager().options(), value.resourceManager().options()); + } + + private void compareProjectInfos(ProjectInfo expected, ProjectInfo value) 
{ + assertEquals(expected.projectId(), value.projectId()); + assertEquals(expected.name(), value.name()); + assertEquals(expected.labels(), value.labels()); + assertEquals(expected.projectNumber(), value.projectNumber()); + assertEquals(expected.createTimeMillis(), value.createTimeMillis()); + assertEquals(expected.state(), value.state()); + assertEquals(expected.parent(), value.parent()); + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java new file mode 100644 index 000000000000..388f38f31c35 --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerExceptionTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class ResourceManagerExceptionTest { + + @Test + public void testResourceManagerException() { + ResourceManagerException exception = new ResourceManagerException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(429, "message"); + assertEquals(429, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new ResourceManagerException(403, "message"); + assertEquals(403, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = new SocketTimeoutException(); + exception = new ResourceManagerException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + } 
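+ + // translateAndThrow should rethrow a RetryHelperException's cause with its code and message intact.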
+ + @Test + public void testTranslateAndThrow() throws Exception { + ResourceManagerException cause = new ResourceManagerException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + ResourceManagerException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java new file mode 100644 index 000000000000..37c54718fb4a --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/ResourceManagerImplTest.java @@ -0,0 +1,333 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Page; +import com.google.gcloud.resourcemanager.ProjectInfo.ResourceId; +import com.google.gcloud.resourcemanager.ResourceManager.ProjectField; +import com.google.gcloud.resourcemanager.ResourceManager.ProjectGetOption; +import com.google.gcloud.resourcemanager.ResourceManager.ProjectListOption; +import com.google.gcloud.resourcemanager.testing.LocalResourceManagerHelper; +import com.google.gcloud.spi.ResourceManagerRpc; +import com.google.gcloud.spi.ResourceManagerRpcFactory; + +import org.easymock.EasyMock; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.Map; + +public class ResourceManagerImplTest { + + private static final LocalResourceManagerHelper RESOURCE_MANAGER_HELPER = + LocalResourceManagerHelper.create(); + private static final ResourceManager RESOURCE_MANAGER = + RESOURCE_MANAGER_HELPER.options().service(); + private static final ProjectGetOption GET_FIELDS = + ProjectGetOption.fields(ProjectField.NAME, ProjectField.CREATE_TIME); + private static final ProjectListOption LIST_FIELDS = + ProjectListOption.fields(ProjectField.NAME, ProjectField.LABELS); + private static final ProjectListOption LIST_FILTER = + ProjectListOption.filter("id:* name:myProject labels.color:blue LABELS.SIZE:*"); + private static final ProjectInfo PARTIAL_PROJECT = 
ProjectInfo.builder("partial-project").build(); + private static final ResourceId PARENT = new ResourceId("id", "type"); + private static final ProjectInfo COMPLETE_PROJECT = ProjectInfo.builder("complete-project") + .name("name") + .labels(ImmutableMap.of("k1", "v1")) + .parent(PARENT) + .build(); + private static final Map<ResourceManagerRpc.Option, ?> EMPTY_RPC_OPTIONS = ImmutableMap.of(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @BeforeClass + public static void beforeClass() { + RESOURCE_MANAGER_HELPER.start(); + } + + @Before + public void setUp() { + clearProjects(); + } + + private void clearProjects() { + for (Project project : RESOURCE_MANAGER.list().values()) { + RESOURCE_MANAGER_HELPER.removeProject(project.projectId()); + } + } + + @AfterClass + public static void afterClass() { + RESOURCE_MANAGER_HELPER.stop(); + } + + private void compareReadWriteFields(ProjectInfo expected, ProjectInfo actual) { + assertEquals(expected.projectId(), actual.projectId()); + assertEquals(expected.name(), actual.name()); + assertEquals(expected.labels(), actual.labels()); + assertEquals(expected.parent(), actual.parent()); + } + + @Test + public void testCreate() { + Project returnedProject = RESOURCE_MANAGER.create(PARTIAL_PROJECT); + compareReadWriteFields(PARTIAL_PROJECT, returnedProject); + assertEquals(ProjectInfo.State.ACTIVE, returnedProject.state()); + assertNull(returnedProject.name()); + assertNull(returnedProject.parent()); + assertNotNull(returnedProject.projectNumber()); + assertNotNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + try { + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + fail("Should fail, project already exists."); + } catch (ResourceManagerException e) { + assertEquals(409, e.code()); + assertTrue(e.getMessage().startsWith("A project with the same project ID") + && e.getMessage().endsWith("already exists.")); + } + returnedProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + compareReadWriteFields(COMPLETE_PROJECT, returnedProject); + assertEquals(ProjectInfo.State.ACTIVE, returnedProject.state()); + assertNotNull(returnedProject.projectNumber()); + assertNotNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + } + + @Test + public void testDelete() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + RESOURCE_MANAGER.delete(COMPLETE_PROJECT.projectId()); + assertEquals(ProjectInfo.State.DELETE_REQUESTED, + RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()).state()); + try { + RESOURCE_MANAGER.delete("some-nonexistent-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("not found.")); + } + } + + @Test + public void testGet() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); + compareReadWriteFields(COMPLETE_PROJECT, returnedProject); + assertEquals(RESOURCE_MANAGER, returnedProject.resourceManager()); + RESOURCE_MANAGER_HELPER.removeProject(COMPLETE_PROJECT.projectId()); + assertNull(RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId())); + } + + @Test + public void testGetWithOptions() { + Project originalProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Project returnedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId(), GET_FIELDS); + assertFalse(COMPLETE_PROJECT.equals(returnedProject)); + assertEquals(COMPLETE_PROJECT.projectId(), 
returnedProject.projectId()); + assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); + assertEquals(originalProject.createTimeMillis(), returnedProject.createTimeMillis()); + assertNull(returnedProject.parent()); + assertNull(returnedProject.projectNumber()); + assertNull(returnedProject.state()); + assertTrue(returnedProject.labels().isEmpty()); + assertEquals(RESOURCE_MANAGER, originalProject.resourceManager()); + assertEquals(RESOURCE_MANAGER, returnedProject.resourceManager()); + } + + @Test + public void testList() { + Page<Project> projects = RESOURCE_MANAGER.list(); + assertFalse(projects.values().iterator().hasNext()); // TODO: change this when #421 is resolved + RESOURCE_MANAGER.create(PARTIAL_PROJECT); + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + for (Project p : RESOURCE_MANAGER.list().values()) { + if (p.projectId().equals(PARTIAL_PROJECT.projectId())) { + compareReadWriteFields(PARTIAL_PROJECT, p); + } else if (p.projectId().equals(COMPLETE_PROJECT.projectId())) { + compareReadWriteFields(COMPLETE_PROJECT, p); + } else { + fail("Some unexpected project returned by list."); + } + assertSame(RESOURCE_MANAGER, p.resourceManager()); + } + } + + @Test + public void testListFieldOptions() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Page<Project> projects = RESOURCE_MANAGER.list(LIST_FIELDS); + Project returnedProject = projects.iterateAll().next(); + assertEquals(COMPLETE_PROJECT.projectId(), returnedProject.projectId()); + assertEquals(COMPLETE_PROJECT.name(), returnedProject.name()); + assertEquals(COMPLETE_PROJECT.labels(), returnedProject.labels()); + assertNull(returnedProject.parent()); + assertNull(returnedProject.projectNumber()); + assertNull(returnedProject.state()); + assertNull(returnedProject.createTimeMillis()); + assertSame(RESOURCE_MANAGER, returnedProject.resourceManager()); + } + + @Test + public void testListFilterOptions() { + ProjectInfo matchingProject = ProjectInfo.builder("matching-project") + .name("MyProject") + .labels(ImmutableMap.of("color", "blue", "size", "big")) + .build(); + ProjectInfo nonMatchingProject1 = ProjectInfo.builder("non-matching-project1") + .name("myProject") + .labels(ImmutableMap.of("color", "blue")) + .build(); + ProjectInfo nonMatchingProject2 = ProjectInfo.builder("non-matching-project2") + .name("myProj") + .labels(ImmutableMap.of("color", "blue", "size", "big")) + .build(); + ProjectInfo nonMatchingProject3 = ProjectInfo.builder("non-matching-project3").build(); + RESOURCE_MANAGER.create(matchingProject); + RESOURCE_MANAGER.create(nonMatchingProject1); + RESOURCE_MANAGER.create(nonMatchingProject2); + RESOURCE_MANAGER.create(nonMatchingProject3); + for (Project p : RESOURCE_MANAGER.list(LIST_FILTER).values()) { + assertFalse(p.equals(nonMatchingProject1)); + assertFalse(p.equals(nonMatchingProject2)); + compareReadWriteFields(matchingProject, p); + assertSame(RESOURCE_MANAGER, p.resourceManager()); + } + } + + @Test + public void testReplace() { + ProjectInfo createdProject = RESOURCE_MANAGER.create(COMPLETE_PROJECT); + Map<String, String> newLabels = ImmutableMap.of("new k1", "new v1"); + ProjectInfo anotherCompleteProject = ProjectInfo.builder(COMPLETE_PROJECT.projectId()) + .labels(newLabels) + .projectNumber(987654321L) + .createTimeMillis(230682061315L) + .state(ProjectInfo.State.DELETE_REQUESTED) + .parent(createdProject.parent()) + .build(); + Project returnedProject = RESOURCE_MANAGER.replace(anotherCompleteProject); + compareReadWriteFields(anotherCompleteProject, returnedProject); + assertEquals(createdProject.projectNumber(), 
returnedProject.projectNumber()); + assertEquals(createdProject.createTimeMillis(), returnedProject.createTimeMillis()); + assertEquals(createdProject.state(), returnedProject.state()); + assertEquals(RESOURCE_MANAGER, returnedProject.resourceManager()); + ProjectInfo nonexistentProject = + ProjectInfo.builder("some-project-id-that-does-not-exist").build(); + try { + RESOURCE_MANAGER.replace(nonexistentProject); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testUndelete() { + RESOURCE_MANAGER.create(COMPLETE_PROJECT); + RESOURCE_MANAGER.delete(COMPLETE_PROJECT.projectId()); + assertEquals( + ProjectInfo.State.DELETE_REQUESTED, + RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()).state()); + RESOURCE_MANAGER.undelete(COMPLETE_PROJECT.projectId()); + ProjectInfo revivedProject = RESOURCE_MANAGER.get(COMPLETE_PROJECT.projectId()); + compareReadWriteFields(COMPLETE_PROJECT, revivedProject); + assertEquals(ProjectInfo.State.ACTIVE, revivedProject.state()); + try { + RESOURCE_MANAGER.undelete("invalid-project-id"); + fail("Should fail because the project doesn't exist."); + } catch (ResourceManagerException e) { + assertEquals(403, e.code()); + assertTrue(e.getMessage().contains("the project was not found")); + } + } + + @Test + public void testRetryableException() { + ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); + ResourceManagerRpc resourceManagerRpcMock = EasyMock.createMock(ResourceManagerRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(ResourceManagerOptions.class))) + .andReturn(resourceManagerRpcMock); + EasyMock.replay(rpcFactoryMock); + ResourceManager resourceManagerMock = ResourceManagerOptions.builder() + .serviceRpcFactory(rpcFactoryMock) + .build() + .service(); + EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) + .andThrow(new ResourceManagerException(500, "Internal Error")) + .andReturn(PARTIAL_PROJECT.toPb()); + EasyMock.replay(resourceManagerRpcMock); + Project returnedProject = resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + assertEquals( + new Project(resourceManagerMock, new ProjectInfo.BuilderImpl(PARTIAL_PROJECT)), + returnedProject); + } + + @Test + public void testNonRetryableException() { + ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); + ResourceManagerRpc resourceManagerRpcMock = EasyMock.createMock(ResourceManagerRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(ResourceManagerOptions.class))) + .andReturn(resourceManagerRpcMock); + EasyMock.replay(rpcFactoryMock); + ResourceManager resourceManagerMock = ResourceManagerOptions.builder() + .serviceRpcFactory(rpcFactoryMock) + .build() + .service(); + EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) + .andThrow(new ResourceManagerException( + 403, "Project " + PARTIAL_PROJECT.projectId() + " not found.")) + .once(); + EasyMock.replay(resourceManagerRpcMock); + thrown.expect(ResourceManagerException.class); + thrown.expectMessage("Project " + PARTIAL_PROJECT.projectId() + " not found."); + resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + } + + @Test + public void testRuntimeException() { + ResourceManagerRpcFactory rpcFactoryMock = EasyMock.createMock(ResourceManagerRpcFactory.class); + 
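+ // The mocked RPC layer below throws a plain RuntimeException; the service should surface it as a ResourceManagerException.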
ResourceManagerRpc resourceManagerRpcMock = EasyMock.createMock(ResourceManagerRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(ResourceManagerOptions.class))) + .andReturn(resourceManagerRpcMock); + EasyMock.replay(rpcFactoryMock); + ResourceManager resourceManagerMock = + ResourceManagerOptions.builder().serviceRpcFactory(rpcFactoryMock).build().service(); + String exceptionMessage = "Artificial runtime exception"; + EasyMock.expect(resourceManagerRpcMock.get(PARTIAL_PROJECT.projectId(), EMPTY_RPC_OPTIONS)) + .andThrow(new RuntimeException(exceptionMessage)); + EasyMock.replay(resourceManagerRpcMock); + thrown.expect(ResourceManagerException.class); + thrown.expectMessage(exceptionMessage); + resourceManagerMock.get(PARTIAL_PROJECT.projectId()); + } +} diff --git a/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java new file mode 100644 index 000000000000..497de880254a --- /dev/null +++ b/gcloud-java-resourcemanager/src/test/java/com/google/gcloud/resourcemanager/SerializationTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.resourcemanager; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; + +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.PageImpl; +import com.google.gcloud.RetryParams; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.util.Collections; + +public class SerializationTest { + + private static final ResourceManager RESOURCE_MANAGER = + ResourceManagerOptions.defaultInstance().service(); + private static final ProjectInfo PARTIAL_PROJECT_INFO = ProjectInfo.builder("id1").build(); + private static final ProjectInfo FULL_PROJECT_INFO = ProjectInfo.builder("id") + .name("name") + .labels(ImmutableMap.of("key", "value")) + .projectNumber(123L) + .state(ProjectInfo.State.ACTIVE) + .createTimeMillis(1234L) + .build(); + private static final Project PROJECT = + new Project(RESOURCE_MANAGER, new ProjectInfo.BuilderImpl(FULL_PROJECT_INFO)); + private static final PageImpl<Project> PAGE_RESULT = + new PageImpl<>(null, "c", Collections.singletonList(PROJECT)); + private static final ResourceManager.ProjectGetOption PROJECT_GET_OPTION = + ResourceManager.ProjectGetOption.fields(ResourceManager.ProjectField.NAME); + private static final ResourceManager.ProjectListOption PROJECT_LIST_OPTION = + ResourceManager.ProjectListOption.filter("name:*"); + + @Test + public void testServiceOptions() throws Exception { + ResourceManagerOptions options = ResourceManagerOptions.builder().build(); + ResourceManagerOptions serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + options = options.toBuilder() + .projectId("some-unnecessary-project-ID") + .retryParams(RetryParams.defaultInstance()) + .build(); + serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + } + + @Test + public void testModelAndRequests() throws Exception { + Serializable[] objects = {PARTIAL_PROJECT_INFO, FULL_PROJECT_INFO, PROJECT, PAGE_RESULT, + PROJECT_GET_OPTION, PROJECT_LIST_OPTION}; + for (Serializable obj : objects) { + Object copy = serializeAndDeserialize(obj); + assertEquals(obj, obj); + assertEquals(obj, copy); + assertNotSame(obj, copy); + assertEquals(copy, copy); + } + } + + @SuppressWarnings("unchecked") + private <T extends Serializable> T serializeAndDeserialize(T obj) + throws IOException, ClassNotFoundException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { + output.writeObject(obj); + } + try (ObjectInputStream input = + new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) { + return (T) input.readObject(); + } + } +} diff --git a/gcloud-java-storage/README.md b/gcloud-java-storage/README.md index bd3a9ea0da88..7260ab5fe5c5 100644 --- a/gcloud-java-storage/README.md +++ b/gcloud-java-storage/README.md @@ -1,7 +1,7 @@ Google Cloud Java Client for Storage ==================================== -Java idiomatic client for [Google Cloud Storage] (https://cloud.google.com/storage/). +Java idiomatic client for [Google Cloud Storage] (https://cloud.google.com/storage/). 
[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) [![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) @@ -15,14 +15,22 @@ Java idiomatic client for [Google Cloud Storage] (https://cloud.google.com/stora Quickstart ---------- -Add this to your pom.xml file +If you are using Maven, add this to your pom.xml file ```xml <dependency> <groupId>com.google.gcloud</groupId> <artifactId>gcloud-java-storage</artifactId> - <version>0.0.10</version> + <version>0.1.3</version> </dependency> ``` +If you are using Gradle, add this to your dependencies +```Groovy +compile 'com.google.gcloud:gcloud-java-storage:0.1.3' +``` +If you are using SBT, add this to your dependencies +```Scala +libraryDependencies += "com.google.gcloud" % "gcloud-java-storage" % "0.1.3" +``` Example Application ------------------- @@ -48,33 +56,146 @@ Cloud Storage for your project. See the ``gcloud-java`` API [storage documentation][storage-api] to learn how to interact with the Cloud Storage using this Client Library. -Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you must [supply credentials](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) and a project ID if running this snippet elsewhere. +Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a [Google Developers Console](https://console.developers.google.com/) project with the Storage JSON API enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to use Google Cloud Storage. [Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your project set up. You will also need to set up the local development environment by [installing the Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands on the command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-storage` library. See the [Quickstart](#quickstart) section to add `gcloud-java-storage` as a dependency in your code. + +#### Creating an authorized service object +To make authenticated requests to Google Cloud Storage, you must create a service object with credentials. You can then make API calls by calling methods on the Storage service object. The simplest way to authenticate is to use [Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). These credentials are automatically inferred from your environment, so you only need the following code to create your service object: + +```java +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.StorageOptions; + +Storage storage = StorageOptions.defaultInstance().service(); +``` + +For other authentication options, see the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Storing data +Stored objects are called "blobs" in `gcloud-java` and are organized into containers called "buckets". In this code snippet, we will create a new bucket and upload a blob to that bucket. 
+ +Add the following imports at the top of your file: + +```java +import static java.nio.charset.StandardCharsets.UTF_8; + +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; +``` + +Then add the following code to create a bucket and upload a simple blob. + +*Important: Bucket names have to be globally unique. If you choose a bucket name that already exists, you'll get a helpful error message telling you to choose another name. In the code below, replace "my_unique_bucket" with a unique bucket name. See more about naming rules [here](https://cloud.google.com/storage/docs/bucket-naming?hl=en#requirements).* + +```java +// Create a bucket +String bucketName = "my_unique_bucket"; // Change this to something unique +BucketInfo bucketInfo = storage.create(BucketInfo.of(bucketName)); + +// Upload a blob to the newly created bucket +BlobId blobId = BlobId.of(bucketName, "my_blob_name"); +BlobInfo blobInfo = storage.create( + BlobInfo.builder(blobId).contentType("text/plain").build(), + "a simple blob".getBytes(UTF_8)); +``` + +At this point, you will be able to see your newly created bucket and blob on the Google Developers Console. + +#### Retrieving data +Now that we have content uploaded to the server, we can see how to read data from the server. Add the following line to your program to get back the blob we uploaded. + +```java +String blobContent = new String(storage.readAllBytes(blobId), UTF_8); +``` + +#### Listing buckets and contents of buckets +Suppose that you've added more buckets and blobs, and now you want to see the names of your buckets and the contents of each one. Add the following imports: + +```java +import java.util.Iterator; +``` + +Then add the following code to list all your buckets and all the blobs inside your newly created bucket. + +```java +// List all your buckets +Iterator<BucketInfo> bucketInfoIterator = storage.list().iterateAll(); +System.out.println("My buckets:"); +while (bucketInfoIterator.hasNext()) { + System.out.println(bucketInfoIterator.next()); +} + +// List the blobs in a particular bucket +Iterator<BlobInfo> blobIterator = storage.list(bucketName).iterateAll(); +System.out.println("My blobs:"); +while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); +} +``` + +#### Complete source code + +Here we put together all the code shown above into one program. This program assumes that you are running on Compute Engine or from your own desktop. To run this example on App Engine, simply move the code from the main method to your application's servlet class and change the print statements to display on your webpage. 
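+ +To run the program from your desktop, one option (assuming you configure Maven's `exec-maven-plugin`, which this README does not set up for you) is `mvn compile exec:java -Dexec.mainClass=GcloudStorageExample`.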
```java import static java.nio.charset.StandardCharsets.UTF_8; -import com.google.gcloud.storage.Blob; +import com.google.gcloud.storage.BlobId; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; import com.google.gcloud.storage.Storage; -import com.google.gcloud.storage.StorageFactory; import com.google.gcloud.storage.StorageOptions; -import java.nio.ByteBuffer; -import java.nio.channels.WritableByteChannel; - -Storage storage = StorageFactory.instance().get(StorageOptions.getDefaultInstance()); -Blob blob = new Blob(storage, "bucket", "blob_name"); -if (!blob.exists()) { - storage2.create(blob.info(), "Hello, Cloud Storage!".getBytes(UTF_8)); -} else { - System.out.println("Updating content for " + blob.info().name()); - byte[] prevContent = blob.content(); - System.out.println(new String(prevContent, UTF_8)); - WritableByteChannel channel = blob.writer(); - channel.write(ByteBuffer.wrap("Updated content".getBytes(UTF_8))); - channel.close(); +import java.util.Iterator; + +public class GcloudStorageExample { + + public static void main(String[] args) { + // Create a service object + // Credentials are inferred from the environment. + Storage storage = StorageOptions.defaultInstance().service(); + + // Create a bucket + String bucketName = "my_unique_bucket"; // Change this to something unique + BucketInfo bucketInfo = storage.create(BucketInfo.of(bucketName)); + + // Upload a blob to the newly created bucket + BlobId blobId = BlobId.of(bucketName, "my_blob_name"); + BlobInfo blobInfo = storage.create( + BlobInfo.builder(blobId).contentType("text/plain").build(), + "a simple blob".getBytes(UTF_8)); + + // Retrieve a blob from the server + String blobContent = new String(storage.readAllBytes(blobId), UTF_8); + + // List all your buckets + Iterator<BucketInfo> bucketInfoIterator = storage.list().iterateAll(); + System.out.println("My buckets:"); + while (bucketInfoIterator.hasNext()) { + System.out.println(bucketInfoIterator.next()); + } + + // List the blobs in a particular bucket + Iterator<BlobInfo> blobIterator = storage.list(bucketName).iterateAll(); + System.out.println("My blobs:"); + while (blobIterator.hasNext()) { + System.out.println(blobIterator.next()); + } + } } ``` +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + Java Versions ------------- @@ -101,7 +222,9 @@ Contributing Contributions to this library are always welcome and highly encouraged. -See [CONTRIBUTING] for more information on how to get started. +See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started. + +Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information. License ------- @@ -110,6 +233,7 @@ Apache 2.0 - See [LICENSE] for more information. 
[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE [TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-storage [cloud-platform]: https://cloud.google.com/ @@ -118,3 +242,4 @@ Apache 2.0 - See [LICENSE] for more information. [cloud-storage-docs]: https://cloud.google.com/storage/docs/overview [cloud-storage-create-bucket]: https://cloud.google.com/storage/docs/cloud-console#_creatingbuckets [storage-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/storage/package-summary.html +[cloud-storage-activation]:https://cloud.google.com/storage/docs/signup?hl=en diff --git a/gcloud-java-storage/pom.xml b/gcloud-java-storage/pom.xml index 2d4d9f8df55b..4e9d368a12bb 100644 --- a/gcloud-java-storage/pom.xml +++ b/gcloud-java-storage/pom.xml @@ -11,8 +11,11 @@ com.google.gcloud gcloud-java-pom - 0.0.11-SNAPSHOT + 0.1.4-SNAPSHOT + + gcloud-java-storage + ${project.groupId} @@ -29,6 +32,10 @@ com.google.guava guava-jdk5 + + com.google.api-client + google-api-client + diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java index 52820171cf29..dc84a1de5559 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java @@ -15,6 +15,7 @@ package com.google.gcloud.spi; import static com.google.gcloud.spi.StorageRpc.Option.DELIMITER; +import static com.google.gcloud.spi.StorageRpc.Option.FIELDS; import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_MATCH; import static com.google.gcloud.spi.StorageRpc.Option.IF_GENERATION_NOT_MATCH; import static com.google.gcloud.spi.StorageRpc.Option.IF_METAGENERATION_MATCH; @@ -29,11 +30,10 @@ import static com.google.gcloud.spi.StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL; import static com.google.gcloud.spi.StorageRpc.Option.PREFIX; import static com.google.gcloud.spi.StorageRpc.Option.VERSIONS; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import com.google.api.client.googleapis.batch.json.JsonBatchCallback; import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.googleapis.media.MediaHttpDownloader; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.GenericUrl; import com.google.api.client.http.HttpHeaders; @@ -57,7 +57,8 @@ import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; import com.google.common.base.MoreObjects; -import com.google.common.collect.ImmutableSet; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.gcloud.storage.StorageException; import com.google.gcloud.storage.StorageOptions; @@ -66,9 +67,9 @@ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; public class DefaultStorageRpc implements StorageRpc { @@ -76,8 +77,8 @@ public 
class DefaultStorageRpc implements StorageRpc { private final StorageOptions options; private final Storage storage; - // see: https://cloud.google.com/storage/docs/concepts-techniques#practices - private static final Set RETRYABLE_CODES = ImmutableSet.of(504, 503, 502, 500, 429, 408); + private static final long MEGABYTE = 1024L * 1024L; + private static final int MAX_BATCH_DELETES = 100; public DefaultStorageRpc(StorageOptions options) { HttpTransport transport = options.httpTransportFactory().create(); @@ -85,25 +86,16 @@ public DefaultStorageRpc(StorageOptions options) { this.options = options; storage = new Storage.Builder(transport, new JacksonFactory(), initializer) .setRootUrl(options.host()) - .setApplicationName("gcloud-java") + .setApplicationName(options.applicationName()) .build(); } private static StorageException translate(IOException exception) { - StorageException translated; - if (exception instanceof GoogleJsonResponseException) { - translated = translate(((GoogleJsonResponseException) exception).getDetails()); - } else { - translated = new StorageException(0, exception.getMessage(), false); - } - translated.initCause(exception); - return translated; + return new StorageException(exception); } private static StorageException translate(GoogleJsonError exception) { - boolean retryable = RETRYABLE_CODES.contains(exception.getCode()) - || "InternalError".equals(exception.getMessage()); - return new StorageException(exception.getCode(), exception.getMessage(), retryable); + return new StorageException(exception); } @Override @@ -149,6 +141,7 @@ public Tuple> list(Map options) { .setPrefix(PREFIX.getString(options)) .setMaxResults(MAX_RESULTS.getLong(options)) .setPageToken(PAGE_TOKEN.getString(options)) + .setFields(FIELDS.getString(options)) .execute(); return Tuple.>of(buckets.getNextPageToken(), buckets.getItems()); } catch (IOException ex) { @@ -167,6 +160,7 @@ public Tuple> list(String bucket, Map .setPrefix(PREFIX.getString(options)) .setMaxResults(MAX_RESULTS.getLong(options)) .setPageToken(PAGE_TOKEN.getString(options)) + .setFields(FIELDS.getString(options)) .execute(); return Tuple.>of( objects.getNextPageToken(), objects.getItems()); @@ -183,9 +177,14 @@ public Bucket get(Bucket bucket, Map options) { .setProjection(DEFAULT_PROJECTION) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) + .setFields(FIELDS.getString(options)) .execute(); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; } } @@ -194,7 +193,11 @@ public StorageObject get(StorageObject object, Map options) { try { return getRequest(object, options).execute(); } catch (IOException ex) { - throw translate(ex); + StorageException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; } } @@ -202,11 +205,13 @@ private Storage.Objects.Get getRequest(StorageObject object, Map opti throws IOException { return storage.objects() .get(object.getBucket(), object.getName()) + .setGeneration(object.getGeneration()) .setProjection(DEFAULT_PROJECTION) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) - 
.setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)); + .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)) + .setFields(FIELDS.getString(options)); } @Override @@ -257,7 +262,7 @@ public boolean delete(Bucket bucket, Map options) { return true; } catch (IOException ex) { StorageException serviceException = translate(ex); - if (serviceException.code() == 404) { + if (serviceException.code() == HTTP_NOT_FOUND) { return false; } throw serviceException; @@ -271,7 +276,7 @@ public boolean delete(StorageObject blob, Map options) { return true; } catch (IOException ex) { StorageException serviceException = translate(ex); - if (serviceException.code() == 404) { + if (serviceException.code() == HTTP_NOT_FOUND) { return false; } throw serviceException; @@ -282,6 +287,7 @@ private Storage.Objects.Delete deleteRequest(StorageObject blob, Map throws IOException { return storage.objects() .delete(blob.getBucket(), blob.getName()) + .setGeneration(blob.getGeneration()) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) @@ -293,7 +299,6 @@ public StorageObject compose(Iterable sources, StorageObject targ Map targetOptions) throws StorageException { ComposeRequest request = new ComposeRequest(); if (target.getContentType() == null) { - // todo: remove once this is no longer requirement (b/20681287). target.setContentType("application/octet-stream"); } request.setDestination(target); @@ -311,7 +316,6 @@ public StorageObject compose(Iterable sources, StorageObject targ } request.setSourceObjects(sourceObjects); try { - // todo: missing setProjection (b/20659000) return storage.objects() .compose(target.getBucket(), target.getName(), request) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(targetOptions)) @@ -322,36 +326,13 @@ public StorageObject compose(Iterable sources, StorageObject targ } } - @Override - public StorageObject copy(StorageObject source, Map sourceOptions, - StorageObject target, Map targetOptions) throws StorageException { - try { - return storage - .objects() - .copy(source.getBucket(), source.getName(), target.getBucket(), target.getName(), - target.getContentType() != null ? 
target : null) - .setProjection(DEFAULT_PROJECTION) - .setIfSourceMetagenerationMatch(IF_SOURCE_METAGENERATION_MATCH.getLong(sourceOptions)) - .setIfSourceMetagenerationNotMatch( - IF_SOURCE_METAGENERATION_NOT_MATCH.getLong(sourceOptions)) - .setIfSourceGenerationMatch(IF_SOURCE_GENERATION_MATCH.getLong(sourceOptions)) - .setIfSourceGenerationNotMatch(IF_SOURCE_GENERATION_NOT_MATCH.getLong(sourceOptions)) - .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(targetOptions)) - .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(targetOptions)) - .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(targetOptions)) - .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(targetOptions)) - .execute(); - } catch (IOException ex) { - throw translate(ex); - } - } - @Override public byte[] load(StorageObject from, Map options) throws StorageException { try { Storage.Objects.Get getRequest = storage.objects() .get(from.getBucket(), from.getName()) + .setGeneration(from.getGeneration()) .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) @@ -367,6 +348,26 @@ public byte[] load(StorageObject from, Map options) @Override public BatchResponse batch(BatchRequest request) throws StorageException { + List>>> partitionedToDelete = + Lists.partition(request.toDelete, MAX_BATCH_DELETES); + Iterator>>> iterator = partitionedToDelete.iterator(); + BatchRequest chunkRequest = new BatchRequest( + iterator.hasNext() + ? iterator.next() : ImmutableList.>>of(), + request.toUpdate, request.toGet); + BatchResponse response = batchChunk(chunkRequest); + Map> deletes = + Maps.newHashMapWithExpectedSize(request.toDelete.size()); + deletes.putAll(response.deletes); + while (iterator.hasNext()) { + chunkRequest = new BatchRequest(iterator.next(), null, null); + BatchResponse deleteBatchResponse = batchChunk(chunkRequest); + deletes.putAll(deleteBatchResponse.deletes); + } + return new BatchResponse(deletes, response.updates, response.gets); + } + + private BatchResponse batchChunk(BatchRequest request) { com.google.api.client.googleapis.batch.BatchRequest batch = storage.batch(); final Map> deletes = Maps.newConcurrentMap(); @@ -384,7 +385,11 @@ public void onSuccess(Void ignore, HttpHeaders responseHeaders) { @Override public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { - deletes.put(tuple.x(), Tuple.of(null, translate(e))); + if (e.getCode() == HTTP_NOT_FOUND) { + deletes.put(tuple.x(), Tuple.of(Boolean.FALSE, null)); + } else { + deletes.put(tuple.x(), Tuple.of(null, translate(e))); + } } }); } @@ -413,8 +418,13 @@ public void onSuccess(StorageObject storageObject, HttpHeaders responseHeaders) @Override public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { - gets.put(tuple.x(), - Tuple.of(null, translate(e))); + if (e.getCode() == HTTP_NOT_FOUND) { + gets.put(tuple.x(), + Tuple.of(null, null)); + } else { + gets.put(tuple.x(), + Tuple.of(null, translate(e))); + } } }); } @@ -426,29 +436,31 @@ public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { } @Override - public byte[] read(StorageObject from, Map options, long position, int bytes) - throws StorageException { + public Tuple read(StorageObject from, Map options, long position, + int bytes) throws StorageException { try { - Get req = storage.objects().get(from.getBucket(), from.getName()); - 
req.setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) + Get req = storage.objects() + .get(from.getBucket(), from.getName()) + .setGeneration(from.getGeneration()) + .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(options)) .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(options)) .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(options)) .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(options)); - MediaHttpDownloader downloader = req.getMediaHttpDownloader(); - // todo: Fix int casting (https://github.com/google/google-api-java-client/issues/937) - downloader.setContentRange(position, (int) position + bytes); - downloader.setDirectDownloadEnabled(true); + StringBuilder range = new StringBuilder(); + range.append("bytes=").append(position).append("-").append(position + bytes - 1); + req.getRequestHeaders().setRange(range.toString()); ByteArrayOutputStream output = new ByteArrayOutputStream(); - req.executeMediaAndDownloadTo(output); - return output.toByteArray(); + req.executeMedia().download(output); + String etag = req.getLastResponseHeaders().getETag(); + return Tuple.of(etag, output.toByteArray()); } catch (IOException ex) { throw translate(ex); } } @Override - public void write(String uploadId, byte[] toWrite, int toWriteOffset, StorageObject dest, - long destOffset, int length, boolean last) throws StorageException { + public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean last) throws StorageException { try { GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = storage.getRequestFactory().buildPutRequest(url, @@ -524,4 +536,47 @@ public String open(StorageObject object, Map options) throw translate(ex); } } + + @Override + public RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException { + return rewrite(rewriteRequest, null); + } + + @Override + public RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException { + return rewrite(previousResponse.rewriteRequest, previousResponse.rewriteToken); + } + + private RewriteResponse rewrite(RewriteRequest req, String token) throws StorageException { + try { + Long maxBytesRewrittenPerCall = req.megabytesRewrittenPerCall != null + ? req.megabytesRewrittenPerCall * MEGABYTE : null; + com.google.api.services.storage.model.RewriteResponse rewriteResponse = storage.objects() + .rewrite(req.source.getBucket(), req.source.getName(), req.target.getBucket(), + req.target.getName(), req.target.getContentType() != null ? 
req.target : null) + .setSourceGeneration(req.source.getGeneration()) + .setRewriteToken(token) + .setMaxBytesRewrittenPerCall(maxBytesRewrittenPerCall) + .setProjection(DEFAULT_PROJECTION) + .setIfSourceMetagenerationMatch(IF_SOURCE_METAGENERATION_MATCH.getLong(req.sourceOptions)) + .setIfSourceMetagenerationNotMatch( + IF_SOURCE_METAGENERATION_NOT_MATCH.getLong(req.sourceOptions)) + .setIfSourceGenerationMatch(IF_SOURCE_GENERATION_MATCH.getLong(req.sourceOptions)) + .setIfSourceGenerationNotMatch(IF_SOURCE_GENERATION_NOT_MATCH.getLong(req.sourceOptions)) + .setIfMetagenerationMatch(IF_METAGENERATION_MATCH.getLong(req.targetOptions)) + .setIfMetagenerationNotMatch(IF_METAGENERATION_NOT_MATCH.getLong(req.targetOptions)) + .setIfGenerationMatch(IF_GENERATION_MATCH.getLong(req.targetOptions)) + .setIfGenerationNotMatch(IF_GENERATION_NOT_MATCH.getLong(req.targetOptions)) + .execute(); + return new RewriteResponse( + req, + rewriteResponse.getResource(), + rewriteResponse.getObjectSize().longValue(), + rewriteResponse.getDone(), + rewriteResponse.getRewriteToken(), + rewriteResponse.getTotalBytesRewritten().longValue()); + } catch (IOException ex) { + throw translate(ex); + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java index b7ac99bf909e..e15a27114810 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java @@ -27,6 +27,7 @@ import java.io.InputStream; import java.util.List; import java.util.Map; +import java.util.Objects; public interface StorageRpc { @@ -46,7 +47,8 @@ enum Option { MAX_RESULTS("maxResults"), PAGE_TOKEN("pageToken"), DELIMITER("delimiter"), - VERSIONS("versions"); + VERSIONS("versions"), + FIELDS("fields"); private final String value; @@ -132,6 +134,89 @@ public BatchResponse(Map> delete } } + class RewriteRequest { + + public final StorageObject source; + public final Map sourceOptions; + public final StorageObject target; + public final Map targetOptions; + public final Long megabytesRewrittenPerCall; + + public RewriteRequest(StorageObject source, Map sourceOptions, + StorageObject target, Map targetOptions, + Long megabytesRewrittenPerCall) { + this.source = source; + this.sourceOptions = sourceOptions; + this.target = target; + this.targetOptions = targetOptions; + this.megabytesRewrittenPerCall = megabytesRewrittenPerCall; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof RewriteRequest)) { + return false; + } + final RewriteRequest other = (RewriteRequest) obj; + return Objects.equals(this.source, other.source) + && Objects.equals(this.sourceOptions, other.sourceOptions) + && Objects.equals(this.target, other.target) + && Objects.equals(this.targetOptions, other.targetOptions) + && Objects.equals(this.megabytesRewrittenPerCall, other.megabytesRewrittenPerCall); + } + + @Override + public int hashCode() { + return Objects.hash(source, sourceOptions, target, targetOptions, megabytesRewrittenPerCall); + } + } + + class RewriteResponse { + + public final RewriteRequest rewriteRequest; + public final StorageObject result; + public final long blobSize; + public final boolean isDone; + public final String rewriteToken; + public final long totalBytesRewritten; + + public RewriteResponse(RewriteRequest rewriteRequest, StorageObject result, long blobSize, + boolean isDone, String 
rewriteToken, long totalBytesRewritten) { + this.rewriteRequest = rewriteRequest; + this.result = result; + this.blobSize = blobSize; + this.isDone = isDone; + this.rewriteToken = rewriteToken; + this.totalBytesRewritten = totalBytesRewritten; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof RewriteResponse)) { + return false; + } + final RewriteResponse other = (RewriteResponse) obj; + return Objects.equals(this.rewriteRequest, other.rewriteRequest) + && Objects.equals(this.result, other.result) + && Objects.equals(this.rewriteToken, other.rewriteToken) + && this.blobSize == other.blobSize + && Objects.equals(this.isDone, other.isDone) + && this.totalBytesRewritten == other.totalBytesRewritten; + } + + @Override + public int hashCode() { + return Objects.hash(rewriteRequest, result, blobSize, isDone, rewriteToken, + totalBytesRewritten); + } + } + Bucket create(Bucket bucket, Map options) throws StorageException; StorageObject create(StorageObject object, InputStream content, Map options) @@ -142,8 +227,18 @@ StorageObject create(StorageObject object, InputStream content, Map o Tuple> list(String bucket, Map options) throws StorageException; + /** + * Returns the requested bucket or {@code null} if not found. + * + * @throws StorageException upon failure + */ Bucket get(Bucket bucket, Map options) throws StorageException; + /** + * Returns the requested storage object or {@code null} if not found. + * + * @throws StorageException upon failure + */ StorageObject get(StorageObject object, Map options) throws StorageException; @@ -161,17 +256,18 @@ StorageObject patch(StorageObject storageObject, Map options) StorageObject compose(Iterable sources, StorageObject target, Map targetOptions) throws StorageException; - StorageObject copy(StorageObject source, Map sourceOptions, - StorageObject target, Map targetOptions) throws StorageException; - byte[] load(StorageObject storageObject, Map options) throws StorageException; - byte[] read(StorageObject from, Map options, long position, int bytes) + Tuple read(StorageObject from, Map options, long position, int bytes) throws StorageException; String open(StorageObject object, Map options) throws StorageException; - void write(String uploadId, byte[] toWrite, int toWriteOffset, StorageObject dest, - long destOffset, int length, boolean last) throws StorageException; + void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, + boolean last) throws StorageException; + + RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException; + + RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java index e5e319b39164..4203d79351b7 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Acl.java @@ -23,7 +23,10 @@ import java.util.Objects; /** - * Access Control List on for buckets or blobs. + * Access Control List for buckets or blobs. + * + * @see + * About Access Control Lists */ public final class Acl implements Serializable { @@ -36,7 +39,10 @@ public enum Role { OWNER, READER, WRITER } - public static abstract class Entity implements Serializable { + /** + * Base class for Access Control List entities. 
+ */ + public abstract static class Entity implements Serializable { private static final long serialVersionUID = -2707407252771255840L; @@ -52,25 +58,30 @@ public enum Type { this.value = value; } + /** + * Returns the type of entity. + */ public Type type() { return type; } + /** + * Returns the entity's value. + */ protected String value() { return value; } @Override - public boolean equals(Object o) { - if (this == o) { + public boolean equals(Object obj) { + if (this == obj) { return true; } - if (o == null || getClass() != o.getClass()) { + if (obj == null || getClass() != obj.getClass()) { return false; } - Entity entity = (Entity) o; - return Objects.equals(type, entity.type) && - Objects.equals(value, entity.value); + Entity entity = (Entity) obj; + return Objects.equals(type, entity.type) && Objects.equals(value, entity.value); } @Override @@ -113,42 +124,75 @@ static Entity fromPb(String entity) { } } + /** + * Class for ACL Domain entities. + */ public static final class Domain extends Entity { private static final long serialVersionUID = -3033025857280447253L; + /** + * Creates a domain entity. + * + * @param domain the domain associated to this entity + */ public Domain(String domain) { super(Type.DOMAIN, domain); } + /** + * Returns the domain associated to this entity. + */ public String domain() { return value(); } } + /** + * Class for ACL Group entities. + */ public static final class Group extends Entity { private static final long serialVersionUID = -1660987136294408826L; + /** + * Creates a group entity. + * + * @param email the group email + */ public Group(String email) { super(Type.GROUP, email); } + /** + * Returns the group email. + */ public String email() { return value(); } } + /** + * Class for ACL User entities. + */ public static final class User extends Entity { private static final long serialVersionUID = 3076518036392737008L; private static final String ALL_USERS = "allUsers"; private static final String ALL_AUTHENTICATED_USERS = "allAuthenticatedUsers"; + /** + * Creates a user entity. + * + * @param email the user email + */ public User(String email) { super(Type.USER, email); } + /** + * Returns the user email. + */ public String email() { return value(); } @@ -175,27 +219,42 @@ public static User ofAllAuthenticatedUsers() { } } + /** + * Class for ACL Project entities. + */ public static final class Project extends Entity { private static final long serialVersionUID = 7933776866530023027L; - private final ProjectRole pRole; + private final ProjectRole projectRole; private final String projectId; public enum ProjectRole { OWNERS, EDITORS, VIEWERS } - public Project(ProjectRole pRole, String projectId) { - super(Type.PROJECT, pRole.name().toLowerCase() + "-" + projectId); - this.pRole = pRole; + /** + * Creates a project entity. + * + * @param projectRole a role in the project, used to select project's teams + * @param projectId id of the project + */ + public Project(ProjectRole projectRole, String projectId) { + super(Type.PROJECT, projectRole.name().toLowerCase() + "-" + projectId); + this.projectRole = projectRole; this.projectId = projectId; } + /** + * Returns the role in the project for this entity. + */ public ProjectRole projectRole() { - return pRole; + return projectRole; } + /** + * Returns the project id for this entity. 
+ */ public String projectId() { return projectId; } @@ -215,19 +274,35 @@ String toPb() { } } - public Acl(Entity entity, Role role) { + private Acl(Entity entity, Role role) { this.entity = entity; this.role = role; } + /** + * Returns the entity for this ACL object. + */ public Entity entity() { return entity; } + /** + * Returns the role associated to the entity in this ACL object. + */ public Role role() { return role; } + /** + * Returns an Acl object. + * + * @param entity the entity for this ACL object + * @param role the role to associate to the {@code entity} object + */ + public static Acl of(Entity entity, Role role) { + return new Acl(entity, role); + } + @Override public int hashCode() { return Objects.hash(entity, role); @@ -262,11 +337,11 @@ ObjectAccessControl toObjectPb() { static Acl fromPb(ObjectAccessControl objectAccessControl) { Role role = Role.valueOf(objectAccessControl.getRole()); - return new Acl(Entity.fromPb(objectAccessControl.getEntity()), role); + return Acl.of(Entity.fromPb(objectAccessControl.getEntity()), role); } static Acl fromPb(BucketAccessControl bucketAccessControl) { Role role = Role.valueOf(bucketAccessControl.getRole()); - return new Acl(Entity.fromPb(bucketAccessControl.getEntity()), role); + return Acl.of(Entity.fromPb(bucketAccessControl.getEntity()), role); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java deleted file mode 100644 index fdcd84705555..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BaseListResult.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import java.io.Serializable; -import java.util.Collections; -import java.util.Iterator; -import java.util.Objects; - -/** - * Base implementation for Google Cloud storage list result. - */ -public class BaseListResult implements ListResult, Serializable { - - private static final long serialVersionUID = -6937287874908527950L; - - private final String cursor; - private final Iterable results; - private final NextPageFetcher pageFetcher; - - public interface NextPageFetcher extends Serializable { - ListResult nextPage(); - } - - public BaseListResult(NextPageFetcher pageFetcher, String cursor, Iterable results) { - this.pageFetcher = pageFetcher; - this.cursor = cursor; - this.results = results; - } - - @Override - public String nextPageCursor() { - return cursor; - } - - @Override - public ListResult nextPage() { - if (cursor == null || pageFetcher == null) { - return null; - } - return pageFetcher.nextPage(); - } - - @Override - public Iterator iterator() { - return results == null ? 
Collections.emptyIterator() : results.iterator(); - } - - @Override - public int hashCode() { - return Objects.hash(cursor, results); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof BaseListResult)) { - return false; - } - BaseListResult other = (BaseListResult) obj; - return Objects.equals(cursor, other.cursor) - && Objects.equals(results, other.results); - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java index 6e815648497a..bf77c731754e 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchRequest.java @@ -18,6 +18,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BlobSourceOption; import com.google.gcloud.storage.Storage.BlobTargetOption; @@ -35,13 +36,13 @@ public final class BatchRequest implements Serializable { private final Map> toDelete; private final Map> toUpdate; - private final Map> toGet; + private final Map> toGet; public static class Builder { private Map> toDelete = new LinkedHashMap<>(); private Map> toUpdate = new LinkedHashMap<>(); - private Map> toGet = new LinkedHashMap<>(); + private Map> toGet = new LinkedHashMap<>(); private Builder() {} @@ -72,7 +73,7 @@ public Builder update(BlobInfo blobInfo, BlobTargetOption... options) { /** * Retrieve metadata for the given blob. */ - public Builder get(String bucket, String blob, BlobSourceOption... options) { + public Builder get(String bucket, String blob, BlobGetOption... options) { toGet.put(BlobId.of(bucket, blob), Lists.newArrayList(options)); return this; } @@ -80,7 +81,7 @@ public Builder get(String bucket, String blob, BlobSourceOption... options) { /** * Retrieve metadata for the given blob. */ - public Builder get(BlobId blob, BlobSourceOption... options) { + public Builder get(BlobId blob, BlobGetOption... 
options) { toGet.put(blob, Lists.newArrayList(options)); return this; } @@ -120,7 +121,7 @@ public Map> toUpdate() { return toUpdate; } - public Map> toGet() { + public Map> toGet() { return toGet; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java index 02b1ca966622..98e7ce09cef0 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BatchResponse.java @@ -113,7 +113,7 @@ static Result empty() { } } - public BatchResponse(List> deleteResult, List> updateResult, + BatchResponse(List> deleteResult, List> updateResult, List> getResult) { this.deleteResult = ImmutableList.copyOf(deleteResult); this.updateResult = ImmutableList.copyOf(updateResult); @@ -133,7 +133,7 @@ public boolean equals(Object obj) { BatchResponse other = (BatchResponse) obj; return Objects.equals(deleteResult, other.deleteResult) && Objects.equals(updateResult, other.updateResult) - && Objects.equals(updateResult, other.updateResult); + && Objects.equals(getResult, other.getResult); } /** diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java index a4a817ead2df..fe65f6ee010b 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Blob.java @@ -18,10 +18,14 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import static com.google.gcloud.storage.Blob.BlobSourceOption.convert; +import static com.google.gcloud.storage.Blob.BlobSourceOption.toGetOptions; +import static com.google.gcloud.storage.Blob.BlobSourceOption.toSourceOptions; import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import com.google.gcloud.ReadChannel; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Storage.BlobTargetOption; import com.google.gcloud.storage.Storage.BlobWriteOption; @@ -29,6 +33,7 @@ import com.google.gcloud.storage.Storage.SignUrlOption; import java.net.URL; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; @@ -37,8 +42,7 @@ /** * A Google cloud storage object. * - *

- * <p>
- * Objects of this class are immutable. Operations that modify the blob like {@link #update} and
+ * <p>Objects of this class are immutable. Operations that modify the blob like {@link #update} and
 * {@link #copyTo} return a new object. To get a {@code Blob} object with the most recent
 * information use {@link #reload}.
 * </p>
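The hunks that follow rework `Blob`'s lookup and option handling: `Blob.load` is renamed to `Blob.get`, lookups return `null` for missing blobs instead of throwing, and `BlobSourceOption`s now convert to the new `Storage.BlobGetOption`. A brief, hypothetical sketch of the resulting call pattern (names are placeholders):

```java
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class BlobGetExample {

  public static void main(String[] args) {
    Storage storage = StorageOptions.defaultInstance().service();

    // Blob.get performs an RPC and returns null when the blob does not exist.
    Blob blob = Blob.get(storage, "my_unique_bucket", "my_blob_name");
    if (blob == null) {
      System.out.println("blob not found");
      return;
    }

    // reload() likewise returns null if the blob was deleted in the meantime;
    // metagenerationMatch() makes the request fail on concurrent metadata changes.
    Blob latest = blob.reload(Blob.BlobSourceOption.metagenerationMatch());
    System.out.println(latest == null ? "blob disappeared" : latest.info());
  }
}
```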
      @@ -48,6 +52,9 @@ public final class Blob { private final Storage storage; private final BlobInfo info; + /** + * Class for specifying blob source options when {@code Blob} methods are used. + */ public static class BlobSourceOption extends Option { private static final long serialVersionUID = 214616862061934846L; @@ -56,7 +63,7 @@ private BlobSourceOption(StorageRpc.Option rpcOption) { super(rpcOption, null); } - private Storage.BlobSourceOption convert(BlobInfo blobInfo) { + private Storage.BlobSourceOption toSourceOptions(BlobInfo blobInfo) { switch (rpcOption()) { case IF_GENERATION_MATCH: return Storage.BlobSourceOption.generationMatch(blobInfo.generation()); @@ -71,27 +78,68 @@ private Storage.BlobSourceOption convert(BlobInfo blobInfo) { } } + private Storage.BlobGetOption toGetOption(BlobInfo blobInfo) { + switch (rpcOption()) { + case IF_GENERATION_MATCH: + return Storage.BlobGetOption.generationMatch(blobInfo.generation()); + case IF_GENERATION_NOT_MATCH: + return Storage.BlobGetOption.generationNotMatch(blobInfo.generation()); + case IF_METAGENERATION_MATCH: + return Storage.BlobGetOption.metagenerationMatch(blobInfo.metageneration()); + case IF_METAGENERATION_NOT_MATCH: + return Storage.BlobGetOption.metagenerationNotMatch(blobInfo.metageneration()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + /** + * Returns an option for blob's generation match. If this option is used the request will fail + * if generation does not match. + */ public static BlobSourceOption generationMatch() { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH); } + /** + * Returns an option for blob's generation mismatch. If this option is used the request will + * fail if generation matches. + */ public static BlobSourceOption generationNotMatch() { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BlobSourceOption metagenerationMatch() { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches. + */ public static BlobSourceOption metagenerationNotMatch() { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } - static Storage.BlobSourceOption[] convert(BlobInfo blobInfo, BlobSourceOption... options) { + static Storage.BlobSourceOption[] toSourceOptions(BlobInfo blobInfo, + BlobSourceOption... options) { Storage.BlobSourceOption[] convertedOptions = new Storage.BlobSourceOption[options.length]; int index = 0; for (BlobSourceOption option : options) { - convertedOptions[index++] = option.convert(blobInfo); + convertedOptions[index++] = option.toSourceOptions(blobInfo); + } + return convertedOptions; + } + + static Storage.BlobGetOption[] toGetOptions(BlobInfo blobInfo, BlobSourceOption... options) { + Storage.BlobGetOption[] convertedOptions = new Storage.BlobGetOption[options.length]; + int index = 0; + for (BlobSourceOption option : options) { + convertedOptions[index++] = option.toGetOption(blobInfo); } return convertedOptions; } @@ -100,7 +148,7 @@ static Storage.BlobSourceOption[] convert(BlobInfo blobInfo, BlobSourceOption... /** * Constructs a {@code Blob} object for the provided {@code BlobInfo}. The storage service is used * to issue requests. 
- * + * * @param storage the storage service used for issuing requests * @param info blob's info */ @@ -111,29 +159,32 @@ public Blob(Storage storage, BlobInfo info) { /** * Creates a {@code Blob} object for the provided bucket and blob names. Performs an RPC call to - * get the latest blob information. - * + * get the latest blob information. Returns {@code null} if the blob does not exist. + * * @param storage the storage service used for issuing requests * @param bucket bucket's name + * @param options blob get options * @param blob blob's name - * @return the {@code Blob} object or {@code null} if not found. + * @return the {@code Blob} object or {@code null} if not found * @throws StorageException upon failure */ - public static Blob load(Storage storage, String bucket, String blob) { - return load(storage, BlobId.of(bucket, blob)); + public static Blob get(Storage storage, String bucket, String blob, + Storage.BlobGetOption... options) { + return get(storage, BlobId.of(bucket, blob), options); } /** * Creates a {@code Blob} object for the provided {@code blobId}. Performs an RPC call to get the - * latest blob information. - * + * latest blob information. Returns {@code null} if the blob does not exist. + * * @param storage the storage service used for issuing requests * @param blobId blob's identifier - * @return the {@code Blob} object or {@code null} if not found. + * @param options blob get options + * @return the {@code Blob} object or {@code null} if not found * @throws StorageException upon failure */ - public static Blob load(Storage storage, BlobId blobId) { - BlobInfo info = storage.get(blobId); + public static Blob get(Storage storage, BlobId blobId, Storage.BlobGetOption... options) { + BlobInfo info = storage.get(blobId, options); return info != null ? new Blob(storage, info) : null; } @@ -159,7 +210,10 @@ public BlobId id() { * @throws StorageException upon failure */ public boolean exists(BlobSourceOption... options) { - return storage.get(info.blobId(), convert(info, options)) != null; + int length = options.length; + Storage.BlobGetOption[] getOptions = Arrays.copyOf(toGetOptions(info, options), length + 1); + getOptions[length] = Storage.BlobGetOption.fields(); + return storage.get(info.blobId(), getOptions) != null; } /** @@ -173,14 +227,14 @@ public byte[] content(Storage.BlobSourceOption... options) { } /** - * Fetches current blob's latest information. + * Fetches current blob's latest information. Returns {@code null} if the blob does not exist. * * @param options blob read options - * @return a {@code Blob} object with latest information + * @return a {@code Blob} object with latest information or {@code null} if not found * @throws StorageException upon failure */ public Blob reload(BlobSourceOption... options) { - return new Blob(storage, storage.get(info.blobId(), convert(info, options))); + return Blob.get(storage, info.blobId(), toGetOptions(info, options)); } /** @@ -191,6 +245,16 @@ public Blob reload(BlobSourceOption... options) { * if the current blob metadata are at their latest version use the {@code metagenerationMatch} * option: {@code blob.update(newInfo, BlobTargetOption.metagenerationMatch())}. * + *

+   * <p>Original metadata are merged with metadata in the provided {@code blobInfo}. To replace
+   * metadata instead you first have to unset them. Unsetting metadata can be done by setting the
+   * provided {@code blobInfo}'s metadata to {@code null}.
+   * </p>
+   *
+   * <p>Example usage of replacing blob's metadata:
+   * <pre>    {@code blob.update(blob.info().toBuilder().metadata(null).build());}
+   *    {@code blob.update(blob.info().toBuilder().metadata(newMetadata).build());}
+   * </pre>
      + * * @param blobInfo new blob's information. Bucket and blob names must match the current ones * @param options update options * @return a {@code Blob} object with updated information @@ -203,81 +267,80 @@ public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { } /** - * Copies this blob to the specified target. Possibly copying also some of the metadata - * (e.g. content-type). + * Deletes this blob. * - * @param targetBlob target blob's id - * @param options source blob options - * @return the copied blob + * @param options blob delete options + * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure */ - public Blob copyTo(BlobId targetBlob, BlobSourceOption... options) { - BlobInfo updatedInfo = info.toBuilder().blobId(targetBlob).build(); - CopyRequest copyRequest = CopyRequest.builder().source(info.bucket(), info.name()) - .sourceOptions(convert(info, options)).target(updatedInfo).build(); - return new Blob(storage, storage.copy(copyRequest)); + public boolean delete(BlobSourceOption... options) { + return storage.delete(info.blobId(), toSourceOptions(info, options)); } /** - * Deletes this blob. + * Sends a copy request for the current blob to the target blob. Possibly also some of the + * metadata are copied (e.g. content-type). * - * @param options blob delete options - * @return true if blob was deleted + * @param targetBlob target blob's id + * @param options source blob options + * @return a {@link CopyWriter} object that can be used to get information on the newly created + * blob or to complete the copy if more than one RPC request is needed * @throws StorageException upon failure */ - public boolean delete(BlobSourceOption... options) { - return storage.delete(info.blobId(), convert(info, options)); + public CopyWriter copyTo(BlobId targetBlob, BlobSourceOption... options) { + CopyRequest copyRequest = CopyRequest.builder().source(info.bucket(), info.name()) + .sourceOptions(toSourceOptions(info, options)).target(targetBlob).build(); + return storage.copy(copyRequest); } /** - * Copies this blob to the target bucket, preserving its name. Possibly copying also some of the - * metadata (e.g. content-type). + * Sends a copy request for the current blob to the target bucket, preserving its name. Possibly + * copying also some of the metadata (e.g. content-type). * * @param targetBucket target bucket's name * @param options source blob options - * @return the copied blob + * @return a {@link CopyWriter} object that can be used to get information on the newly created + * blob or to complete the copy if more than one RPC request is needed * @throws StorageException upon failure */ - public Blob copyTo(String targetBucket, BlobSourceOption... options) { + public CopyWriter copyTo(String targetBucket, BlobSourceOption... options) { return copyTo(targetBucket, info.name(), options); } /** - * Copies this blob to the target bucket with a new name. Possibly copying also some of the - * metadata (e.g. content-type). + * Sends a copy request for the current blob to the target blob. Possibly also some of the + * metadata are copied (e.g. content-type). 
* * @param targetBucket target bucket's name * @param targetBlob target blob's name * @param options source blob options - * @return the copied blob + * @return a {@link CopyWriter} object that can be used to get information on the newly created + * blob or to complete the copy if more than one RPC request is needed * @throws StorageException upon failure */ - public Blob copyTo(String targetBucket, String targetBlob, BlobSourceOption... options) { - BlobInfo updatedInfo = info.toBuilder().blobId(BlobId.of(targetBucket, targetBlob)).build(); - CopyRequest copyRequest = CopyRequest.builder().source(info.bucket(), info.name()) - .sourceOptions(convert(info, options)).target(updatedInfo).build(); - return new Blob(storage, storage.copy(copyRequest)); + public CopyWriter copyTo(String targetBucket, String targetBlob, BlobSourceOption... options) { + return copyTo(BlobId.of(targetBucket, targetBlob), options); } /** - * Returns a {@code BlobReadChannel} object for reading this blob's content. + * Returns a {@code ReadChannel} object for reading this blob's content. * * @param options blob read options * @throws StorageException upon failure */ - public BlobReadChannel reader(BlobSourceOption... options) { - return storage.reader(info.blobId(), convert(info, options)); + public ReadChannel reader(BlobSourceOption... options) { + return storage.reader(info.blobId(), toSourceOptions(info, options)); } /** - * Returns a {@code BlobWriteChannel} object for writing to this blob. By default any md5 and + * Returns a {@code WriteChannel} object for writing to this blob. By default any md5 and * crc32c values in the current blob are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. * * @param options target blob options * @throws StorageException upon failure */ - public BlobWriteChannel writer(BlobWriteOption... options) { + public WriteChannel writer(BlobWriteOption... options) { return storage.writer(info, options); } @@ -306,38 +369,63 @@ public Storage storage() { } /** - * Gets the requested blobs. If {@code infos.length == 0} an empty list is returned. If - * {@code infos.length > 1} a batch request is used to fetch blobs. + * Gets the requested blobs. A batch request is used to fetch blobs. * * @param storage the storage service used to issue the request - * @param blobs the blobs to get + * @param first the first blob to get + * @param second the second blob to get + * @param other other blobs to get * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it has - * been denied the corresponding item in the list is {@code null}. + * been denied the corresponding item in the list is {@code null} * @throws StorageException upon failure */ - public static List get(final Storage storage, BlobId... blobs) { + public static List get(Storage storage, BlobId first, BlobId second, BlobId... other) { + checkNotNull(storage); + checkNotNull(first); + checkNotNull(second); + checkNotNull(other); + ImmutableList blobs = ImmutableList.builder() + .add(first) + .add(second) + .addAll(Arrays.asList(other)) + .build(); + return get(storage, blobs); + } + + /** + * Gets the requested blobs. A batch request is used to fetch blobs. + * + * @param storage the storage service used to issue the request + * @param blobs list of blobs to get + * @return an immutable list of {@code Blob} objects. 
If a blob does not exist or access to it has + * been denied the corresponding item in the list is {@code null} + * @throws StorageException upon failure + */ + public static List get(final Storage storage, List blobs) { checkNotNull(storage); checkNotNull(blobs); - if (blobs.length == 0) { - return Collections.emptyList(); - } - return Collections.unmodifiableList(Lists.transform(storage.get(blobs), + BlobId[] blobArray = blobs.toArray(new BlobId[blobs.size()]); + return Collections.unmodifiableList(Lists.transform(storage.get(blobArray), new Function() { @Override - public Blob apply(BlobInfo f) { - return f != null ? new Blob(storage, f) : null; + public Blob apply(BlobInfo blobInfo) { + return blobInfo != null ? new Blob(storage, blobInfo) : null; } })); } /** - * Updates the requested blobs. If {@code infos.length == 0} an empty list is returned. If - * {@code infos.length > 1} a batch request is used to update blobs. + * Updates the requested blobs. A batch request is used to update blobs. Original metadata are + * merged with metadata in the provided {@code BlobInfo} objects. To replace metadata instead + * you first have to unset them. Unsetting metadata can be done by setting the provided + * {@code BlobInfo} objects metadata to {@code null}. See + * {@link #update(com.google.gcloud.storage.BlobInfo, + * com.google.gcloud.storage.Storage.BlobTargetOption...) } for a code example. * * @param storage the storage service used to issue the request * @param infos the blobs to update * @return an immutable list of {@code Blob} objects. If a blob does not exist or access to it has - * been denied the corresponding item in the list is {@code null}. + * been denied the corresponding item in the list is {@code null} * @throws StorageException upon failure */ public static List update(final Storage storage, BlobInfo... infos) { @@ -349,21 +437,20 @@ public static List update(final Storage storage, BlobInfo... infos) { return Collections.unmodifiableList(Lists.transform(storage.update(infos), new Function() { @Override - public Blob apply(BlobInfo f) { - return f != null ? new Blob(storage, f) : null; + public Blob apply(BlobInfo blobInfo) { + return blobInfo != null ? new Blob(storage, blobInfo) : null; } })); } /** - * Deletes the requested blobs. If {@code infos.length == 0} an empty list is returned. If - * {@code infos.length > 1} a batch request is used to delete blobs. + * Deletes the requested blobs. A batch request is used to delete blobs. * * @param storage the storage service used to issue the request * @param blobs the blobs to delete * @return an immutable list of booleans. If a blob has been deleted the corresponding item in the - * list is {@code true}. If deletion failed or access to the resource was denied the item is - * {@code false}. + * list is {@code true}. If a blob was not found, deletion failed or access to the resource + * was denied the corresponding item is {@code false} * @throws StorageException upon failure */ public static List delete(Storage storage, BlobId... 
blobs) { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java index ea98e65d244d..d30003d632db 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobId.java @@ -16,67 +16,106 @@ package com.google.gcloud.storage; -import com.google.api.services.storage.model.StorageObject; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.api.services.storage.model.StorageObject; import com.google.common.base.MoreObjects; import java.io.Serializable; import java.util.Objects; /** - * Google Storage object identifier. + * Google Storage Object identifier. A {@code BlobId} object includes the name of the containing + * bucket, the blob's name and possibly the blob's generation. If {@link #generation()} is + * {@code null} the identifier refers to the latest blob's generation. */ public final class BlobId implements Serializable { private static final long serialVersionUID = -6156002883225601925L; private final String bucket; private final String name; + private final Long generation; - private BlobId(String bucket, String name) { + private BlobId(String bucket, String name, Long generation) { this.bucket = bucket; this.name = name; + this.generation = generation; } + /** + * Returns the name of the bucket containing the blob. + */ public String bucket() { return bucket; } + /** + * Returns the name of the blob. + */ public String name() { return name; } + /** + * Returns blob's data generation. Used for versioning. + */ + public Long generation() { + return generation; + } + @Override public String toString() { return MoreObjects.toStringHelper(this) .add("bucket", bucket()) .add("name", name()) + .add("generation", generation()) .toString(); } @Override public int hashCode() { - return Objects.hash(bucket, name); + return Objects.hash(bucket, name, generation); } @Override public boolean equals(Object obj) { return obj instanceof BlobId && Objects.equals(bucket, ((BlobId) obj).bucket) - && Objects.equals(name, ((BlobId) obj).name); + && Objects.equals(name, ((BlobId) obj).name) + && Objects.equals(generation, ((BlobId) obj).generation); } StorageObject toPb() { StorageObject storageObject = new StorageObject(); storageObject.setBucket(bucket); storageObject.setName(name); + storageObject.setGeneration(generation); return storageObject; } + /** + * Creates a blob identifier. Generation is set to {@code null}. + * + * @param bucket the name of the bucket that contains the blob + * @param name the name of the blob + */ public static BlobId of(String bucket, String name) { - return new BlobId(checkNotNull(bucket), checkNotNull(name)); + return new BlobId(checkNotNull(bucket), checkNotNull(name), null); + } + + /** + * Creates a {@code BlobId} object. + * + * @param bucket name of the containing bucket + * @param name blob's name + * @param generation blob's data generation, used for versioning. 
If {@code null} the identifier + * refers to the latest blob's generation + */ + public static BlobId of(String bucket, String name, Long generation) { + return new BlobId(checkNotNull(bucket), checkNotNull(name), generation); } static BlobId fromPb(StorageObject storageObject) { - return BlobId.of(storageObject.getBucket(), storageObject.getName()); + return BlobId.of(storageObject.getBucket(), storageObject.getName(), + storageObject.getGeneration()); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java index 8e6921bbc20d..b27d00d68a16 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobInfo.java @@ -27,14 +27,19 @@ import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import java.io.Serializable; import java.math.BigInteger; +import java.util.AbstractMap; +import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; /** * Google Storage object metadata. @@ -71,7 +76,6 @@ public StorageObject apply(BlobInfo blobInfo) { private final String crc32c; private final String mediaLink; private final Map metadata; - private final Long generation; private final Long metageneration; private final Long deleteTime; private final Long updateTime; @@ -81,6 +85,17 @@ public StorageObject apply(BlobInfo blobInfo) { private final String contentLanguage; private final Integer componentCount; + /** + * This class is meant for internal use only. Users are discouraged from using this class. + */ + public static final class ImmutableEmptyMap extends AbstractMap { + + @Override + public Set> entrySet() { + return ImmutableSet.of(); + } + } + public static final class Builder { private BlobId blobId; @@ -91,7 +106,7 @@ public static final class Builder { private String contentLanguage; private Integer componentCount; private String cacheControl; - private ImmutableList acl; + private List acl; private Acl.Entity owner; private Long size; private String etag; @@ -99,14 +114,39 @@ public static final class Builder { private String md5; private String crc32c; private String mediaLink; - private ImmutableMap metadata; - private Long generation; + private Map metadata; private Long metageneration; private Long deleteTime; private Long updateTime; private Builder() {} + private Builder(BlobInfo blobInfo) { + blobId = blobInfo.blobId; + id = blobInfo.id; + cacheControl = blobInfo.cacheControl; + contentEncoding = blobInfo.contentEncoding; + contentType = blobInfo.contentType; + contentDisposition = blobInfo.contentDisposition; + contentLanguage = blobInfo.contentLanguage; + componentCount = blobInfo.componentCount; + acl = blobInfo.acl; + owner = blobInfo.owner; + size = blobInfo.size; + etag = blobInfo.etag; + selfLink = blobInfo.selfLink; + md5 = blobInfo.md5; + crc32c = blobInfo.crc32c; + mediaLink = blobInfo.mediaLink; + metadata = blobInfo.metadata; + metageneration = blobInfo.metageneration; + deleteTime = blobInfo.deleteTime; + updateTime = blobInfo.updateTime; + } + + /** + * Sets the blob identity. 
+ */ public Builder blobId(BlobId blobId) { this.blobId = checkNotNull(blobId); return this; @@ -117,21 +157,41 @@ Builder id(String id) { return this; } + /** + * Sets the blob's data content type. + * + * @see Content-Type + */ public Builder contentType(String contentType) { this.contentType = firstNonNull(contentType, Data.nullOf(String.class)); return this; } + /** + * Sets the blob's data content disposition. + * + * @see Content-Disposition + */ public Builder contentDisposition(String contentDisposition) { this.contentDisposition = firstNonNull(contentDisposition, Data.nullOf(String.class)); return this; } + /** + * Sets the blob's data content language. + * + * @see Content-Language + */ public Builder contentLanguage(String contentLanguage) { this.contentLanguage = firstNonNull(contentLanguage, Data.nullOf(String.class)); return this; } + /** + * Sets the blob's data content encoding. + * + * @see Content-Encoding + */ public Builder contentEncoding(String contentEncoding) { this.contentEncoding = firstNonNull(contentEncoding, Data.nullOf(String.class)); return this; @@ -142,11 +202,22 @@ Builder componentCount(Integer componentCount) { return this; } + /** + * Sets the blob's data cache control. + * + * @see Cache-Control + */ public Builder cacheControl(String cacheControl) { this.cacheControl = firstNonNull(cacheControl, Data.nullOf(String.class)); return this; } + /** + * Sets the blob's access control configuration. + * + * @see + * About Access Control Lists + */ public Builder acl(List acl) { this.acl = acl != null ? ImmutableList.copyOf(acl) : null; return this; @@ -172,11 +243,25 @@ Builder selfLink(String selfLink) { return this; } + /** + * Sets the MD5 hash of blob's data. MD5 value must be encoded in base64. + * + * @see + * Hashes and ETags: Best Practices + */ public Builder md5(String md5) { this.md5 = firstNonNull(md5, Data.nullOf(String.class)); return this; } + /** + * Sets the CRC32C checksum of blob's data as described in + * RFC 4960, Appendix B; encoded in + * base64 in big-endian order. + * + * @see + * Hashes and ETags: Best Practices + */ public Builder crc32c(String crc32c) { this.crc32c = firstNonNull(crc32c, Data.nullOf(String.class)); return this; @@ -187,13 +272,12 @@ Builder mediaLink(String mediaLink) { return this; } + /** + * Sets the blob's user provided metadata. + */ public Builder metadata(Map metadata) { - this.metadata = metadata != null ? ImmutableMap.copyOf(metadata) : null; - return this; - } - - Builder generation(Long generation) { - this.generation = generation; + this.metadata = metadata != null + ? new HashMap<>(metadata) : Data.>nullOf(ImmutableEmptyMap.class); return this; } @@ -212,6 +296,9 @@ Builder updateTime(Long updateTime) { return this; } + /** + * Creates a {@code BlobInfo} object. + */ public BlobInfo build() { checkNotNull(blobId); return new BlobInfo(this); @@ -236,127 +323,210 @@ private BlobInfo(Builder builder) { crc32c = builder.crc32c; mediaLink = builder.mediaLink; metadata = builder.metadata; - generation = builder.generation; metageneration = builder.metageneration; deleteTime = builder.deleteTime; updateTime = builder.updateTime; } + /** + * Returns the blob's identity. + */ public BlobId blobId() { return blobId; } + /** + * Returns the name of the containing bucket. + */ public String bucket() { return blobId().bucket(); } + /** + * Returns the blob's id. + */ public String id() { return id; } + /** + * Returns the blob's name. 
+ */ public String name() { return blobId().name(); } + /** + * Returns the blob's data cache control. + * + * @see Cache-Control + */ public String cacheControl() { return Data.isNull(cacheControl) ? null : cacheControl; } + /** + * Returns the blob's access control configuration. + * + * @see + * About Access Control Lists + */ public List acl() { return acl; } + /** + * Returns the blob's owner. This will always be the uploader of the blob. + */ public Acl.Entity owner() { return owner; } + /** + * Returns the content length of the data in bytes. + * + * @see Content-Length + */ public Long size() { return size; } + /** + * Returns the blob's data content type. + * + * @see Content-Type + */ public String contentType() { return Data.isNull(contentType) ? null : contentType; } + /** + * Returns the blob's data content encoding. + * + * @see Content-Encoding + */ public String contentEncoding() { return Data.isNull(contentEncoding) ? null : contentEncoding; } + /** + * Returns the blob's data content disposition. + * + * @see Content-Disposition + */ public String contentDisposition() { return Data.isNull(contentDisposition) ? null : contentDisposition; } + /** + * Returns the blob's data content language. + * + * @see Content-Language + */ public String contentLanguage() { return Data.isNull(contentLanguage) ? null : contentLanguage; } + /** + * Returns the number of components that make up this blob. Components are accumulated through + * the {@link Storage#compose(Storage.ComposeRequest)} operation and are limited to a count of + * 1024, counting 1 for each non-composite component blob and componentCount for each composite + * component blob. This value is set only for composite blobs. + * + * @see Component Count + * Property + */ public Integer componentCount() { return componentCount; } + /** + * Returns HTTP 1.1 Entity tag for the blob. + * + * @see Entity Tags + */ public String etag() { return etag; } + /** + * Returns the URI of this blob as a string. + */ public String selfLink() { return selfLink; } + /** + * Returns the MD5 hash of blob's data encoded in base64. + * + * @see + * Hashes and ETags: Best Practices + */ public String md5() { return Data.isNull(md5) ? null : md5; } + /** + * Returns the CRC32C checksum of blob's data as described in + * RFC 4960, Appendix B; encoded in + * base64 in big-endian order. + * + * @see + * Hashes and ETags: Best Practices + */ public String crc32c() { return Data.isNull(crc32c) ? null : crc32c; } + /** + * Returns the blob's media download link. + */ public String mediaLink() { return mediaLink; } + /** + * Returns blob's user provided metadata. + */ public Map metadata() { - return metadata; + return metadata == null || Data.isNull(metadata) ? null : Collections.unmodifiableMap(metadata); } + /** + * Returns blob's data generation. Used for blob versioning. + */ public Long generation() { - return generation; + return blobId().generation(); } + /** + * Returns blob's metageneration. Used for preconditions and for detecting changes in metadata. + * A metageneration number is only meaningful in the context of a particular generation of a + * particular blob. + */ public Long metageneration() { return metageneration; } + /** + * Returns the deletion time of the blob. + */ public Long deleteTime() { return deleteTime; } + /** + * Returns the last modification time of the blob's metadata. + */ public Long updateTime() { return updateTime; } + /** + * Returns a builder for the current blob. 
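The accessors above are straightforward, but two behavior changes are worth illustrating: `metadata()` now returns an unmodifiable view (or `null` when unset), and `generation()` is delegated to the blob's `BlobId` instead of a separate field. A small sketch with hypothetical names (not part of the patch):

```java
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.BlobInfo;

import java.util.Collections;
import java.util.Map;

public class BlobInfoExample {
  public static void main(String... args) {
    BlobInfo info = BlobInfo.builder(BlobId.of("my_bucket", "my_blob"))
        .contentType("text/plain")
        .metadata(Collections.singletonMap("color", "blue"))
        .build();

    // The map is defensively copied on set and wrapped as unmodifiable on get.
    Map<String, String> metadata = info.metadata();
    System.out.println(metadata.get("color")); // blue

    // No generation in the BlobId, so generation() is null until assigned by the service.
    System.out.println(info.generation());
  }
}
```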
+ */ public Builder toBuilder() { - return new Builder() - .blobId(blobId) - .id(id) - .generation(generation) - .cacheControl(cacheControl) - .contentEncoding(contentEncoding) - .contentType(contentType) - .contentDisposition(contentDisposition) - .contentLanguage(contentLanguage) - .componentCount(componentCount) - .crc32c(crc32c) - .md5(md5) - .deleteTime(deleteTime) - .updateTime(updateTime) - .mediaLink(mediaLink) - .metadata(metadata) - .metageneration(metageneration) - .acl(acl) - .owner(owner) - .size(size) - .etag(etag) - .selfLink(selfLink); + return new Builder(this); } @Override @@ -364,6 +534,7 @@ public String toString() { return MoreObjects.toStringHelper(this) .add("bucket", bucket()) .add("name", name()) + .add("generation", generation()) .add("size", size()) .add("content-type", contentType()) .add("metadata", metadata()) @@ -402,14 +573,21 @@ public ObjectAccessControl apply(Acl acl) { if (owner != null) { storageObject.setOwner(new Owner().setEntity(owner.toPb())); } + Map pbMetadata = metadata; + if (metadata != null && !Data.isNull(metadata)) { + pbMetadata = Maps.newHashMapWithExpectedSize(metadata.size()); + for (Map.Entry entry : metadata.entrySet()) { + pbMetadata.put(entry.getKey(), + firstNonNull(entry.getValue(), Data.nullOf(String.class))); + } + } + storageObject.setMetadata(pbMetadata); storageObject.setCacheControl(cacheControl); storageObject.setContentEncoding(contentEncoding); storageObject.setCrc32c(crc32c); storageObject.setContentType(contentType); - storageObject.setGeneration(generation); storageObject.setMd5Hash(md5); storageObject.setMediaLink(mediaLink); - storageObject.setMetadata(metadata); storageObject.setMetageneration(metageneration); storageObject.setContentDisposition(contentDisposition); storageObject.setComponentCount(componentCount); @@ -420,14 +598,34 @@ public ObjectAccessControl apply(Acl acl) { return storageObject; } + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ public static Builder builder(BucketInfo bucketInfo, String name) { return builder(bucketInfo.name(), name); } + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ public static Builder builder(String bucket, String name) { return new Builder().blobId(BlobId.of(bucket, name)); } + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. + */ + public static Builder builder(BucketInfo bucketInfo, String name, Long generation) { + return builder(bucketInfo.name(), name, generation); + } + + /** + * Returns a {@code BlobInfo} builder where blob identity is set using the provided values. 
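Since `toBuilder()` now delegates to the private copy constructor shown earlier, callers only set the fields they want to change. An illustrative sketch (hypothetical names, not part of the patch):

```java
import com.google.gcloud.storage.BlobInfo;

public class ToBuilderExample {
  public static void main(String... args) {
    BlobInfo info = BlobInfo.builder("my_bucket", "my_blob")
        .contentType("text/plain")
        .build();

    // Every other field is copied over; only contentType is replaced.
    BlobInfo updated = info.toBuilder().contentType("application/json").build();

    System.out.println(updated.name() + ": " + updated.contentType());
  }
}
```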
+ */ + public static Builder builder(String bucket, String name, Long generation) { + return new Builder().blobId(BlobId.of(bucket, name, generation)); + } + public static Builder builder(BlobId blobId) { return new Builder().blobId(blobId); } @@ -446,9 +644,6 @@ static BlobInfo fromPb(StorageObject storageObject) { if (storageObject.getContentType() != null) { builder.contentType(storageObject.getContentType()); } - if (storageObject.getGeneration() != null) { - builder.generation(storageObject.getGeneration()); - } if (storageObject.getMd5Hash() != null) { builder.md5(storageObject.getMd5Hash()); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java deleted file mode 100644 index 9e6ec9dc5655..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobListResult.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.common.base.Function; -import com.google.common.collect.Iterators; - -import java.util.Iterator; -import java.util.Objects; - -/** - * Implementation of a paginated list of Google Cloud storage {@code Blob}. 
- */ -public class BlobListResult implements ListResult { - - private final ListResult infoList; - private final Storage storage; - - public BlobListResult(Storage storage, ListResult infoList) { - this.storage = checkNotNull(storage); - this.infoList = checkNotNull(infoList); - } - - @Override - public String nextPageCursor() { - return infoList.nextPageCursor(); - } - - @Override - public ListResult nextPage() { - ListResult nextPageInfoList = infoList.nextPage(); - if (nextPageInfoList == null) { - return null; - } - return new BlobListResult(storage, nextPageInfoList); - } - - @Override - public Iterator iterator() { - return Iterators.transform(infoList.iterator(), new Function() { - @Override - public Blob apply(BlobInfo info) { - return new Blob(storage, info); - } - }); - } - - @Override - public int hashCode() { - return Objects.hash(infoList); - } - - @Override - public boolean equals(Object obj) { - if (!(obj instanceof BlobListResult)) { - return false; - } - BlobListResult other = (BlobListResult) obj; - return Objects.equals(infoList, other.infoList); - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java index b004e3d61634..121f2eb63589 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannel.java @@ -16,42 +16,264 @@ package com.google.gcloud.storage; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.api.services.storage.model.StorageObject; +import com.google.common.base.MoreObjects; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.Tuple; -import java.io.Closeable; import java.io.IOException; -import java.nio.channels.ReadableByteChannel; +import java.io.Serializable; +import java.nio.ByteBuffer; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; /** - * A channel for reading data from a Google Cloud Storage object. - * - * Implementations of this class may buffer data internally to reduce remote calls. - * - * This class is {@link Serializable}, which allows incremental reads. + * Default implementation for ReadChannel.
*/ -public interface BlobReadChannel extends ReadableByteChannel, Closeable { +class BlobReadChannel implements ReadChannel { + + private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; + + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private String lastEtag; + private int position; + private boolean isOpen; + private boolean endOfStream; + private int chunkSize = DEFAULT_CHUNK_SIZE; + + private final StorageRpc storageRpc; + private final StorageObject storageObject; + private int bufferPos; + private byte[] buffer; + + BlobReadChannel(StorageOptions serviceOptions, BlobId blob, + Map requestOptions) { + this.serviceOptions = serviceOptions; + this.blob = blob; + this.requestOptions = requestOptions; + isOpen = true; + storageRpc = serviceOptions.rpc(); + storageObject = blob.toPb(); + } + + @Override + public RestorableState capture() { + StateImpl.Builder builder = StateImpl.builder(serviceOptions, blob, requestOptions) + .position(position) + .isOpen(isOpen) + .endOfStream(endOfStream) + .chunkSize(chunkSize); + if (buffer != null) { + builder.position(position + bufferPos); + builder.endOfStream(false); + } + return builder.build(); + } + + @Override + public boolean isOpen() { + return isOpen; + } + + @Override + public void close() { + if (isOpen) { + buffer = null; + isOpen = false; + } + } + + private void validateOpen() throws IOException { + if (!isOpen) { + throw new IOException("stream is closed"); + } + } + + @Override + public void seek(int position) throws IOException { + validateOpen(); + this.position = position; + buffer = null; + bufferPos = 0; + endOfStream = false; + } + + @Override + public void chunkSize(int chunkSize) { + this.chunkSize = chunkSize <= 0 ? DEFAULT_CHUNK_SIZE : chunkSize; + } - /** - * Overridden to remove IOException. - * - * @see java.nio.channels.Channel#close() - */ @Override - void close(); - - void seek(int position) throws IOException; - - /** - * Sets the minimum size that will be read by a single RPC. - * Read data will be locally buffered until consumed. - */ - void chunkSize(int chunkSize); - - /** - * Saves the read channel state. - * - * @return a {@link RestorableState} object that contains the read channel state and can restore - * it afterwards. State object must implement {@link java.io.Serializable}. 
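The renamed `capture()` method (formerly `save()`) returns a serializable snapshot of the channel, so a read can be suspended and resumed later, even in another process. A usage sketch, not part of the patch, assuming a configured service and the `Storage.reader` factory (project, bucket, and blob names are placeholders):

```java
import com.google.gcloud.ReadChannel;
import com.google.gcloud.RestorableState;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;

public class ReadChannelExample {
  public static void main(String... args) throws IOException {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    ReadChannel reader = storage.reader(BlobId.of("my_bucket", "my_blob"));
    ByteBuffer bytes = ByteBuffer.allocate(64 * 1024);
    reader.read(bytes);

    // Snapshot the channel; the state is Serializable, so the read can resume elsewhere.
    RestorableState<ReadChannel> state = reader.capture();
    reader.close();

    ReadChannel resumed = state.restore();
    bytes.clear();
    resumed.read(bytes);
    resumed.close();
  }
}
```

Note that the new implementation also tracks the blob's etag between chunks and fails the read with a `StorageException` if the blob changes mid-stream.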
- */ - public RestorableState save(); + public int read(ByteBuffer byteBuffer) throws IOException { + validateOpen(); + if (buffer == null) { + if (endOfStream) { + return -1; + } + final int toRead = Math.max(byteBuffer.remaining(), chunkSize); + try { + Tuple result = runWithRetries(new Callable>() { + @Override + public Tuple call() { + return storageRpc.read(storageObject, requestOptions, position, toRead); + } + }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); + if (lastEtag != null && !Objects.equals(result.x(), lastEtag)) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blob).append(" was updated while reading"); + throw new StorageException(0, messageBuilder.toString()); + } + lastEtag = result.x(); + buffer = result.y(); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + if (toRead > buffer.length) { + endOfStream = true; + if (buffer.length == 0) { + buffer = null; + return -1; + } + } + } + int toWrite = Math.min(buffer.length - bufferPos, byteBuffer.remaining()); + byteBuffer.put(buffer, bufferPos, toWrite); + bufferPos += toWrite; + if (bufferPos >= buffer.length) { + position += buffer.length; + buffer = null; + bufferPos = 0; + } + return toWrite; + } + + static class StateImpl implements RestorableState, Serializable { + + private static final long serialVersionUID = 3889420316004453706L; + + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private final String lastEtag; + private final int position; + private final boolean isOpen; + private final boolean endOfStream; + private final int chunkSize; + + StateImpl(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.blob = builder.blob; + this.requestOptions = builder.requestOptions; + this.lastEtag = builder.lastEtag; + this.position = builder.position; + this.isOpen = builder.isOpen; + this.endOfStream = builder.endOfStream; + this.chunkSize = builder.chunkSize; + } + + static class Builder { + private final StorageOptions serviceOptions; + private final BlobId blob; + private final Map requestOptions; + private String lastEtag; + private int position; + private boolean isOpen; + private boolean endOfStream; + private int chunkSize; + + private Builder(StorageOptions options, BlobId blob, Map reqOptions) { + this.serviceOptions = options; + this.blob = blob; + this.requestOptions = reqOptions; + } + + Builder lastEtag(String lastEtag) { + this.lastEtag = lastEtag; + return this; + } + + Builder position(int position) { + this.position = position; + return this; + } + + Builder isOpen(boolean isOpen) { + this.isOpen = isOpen; + return this; + } + + Builder endOfStream(boolean endOfStream) { + this.endOfStream = endOfStream; + return this; + } + + Builder chunkSize(int chunkSize) { + this.chunkSize = chunkSize; + return this; + } + + RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder( + StorageOptions options, BlobId blob, Map reqOptions) { + return new Builder(options, blob, reqOptions); + } + + @Override + public ReadChannel restore() { + BlobReadChannel channel = new BlobReadChannel(serviceOptions, blob, requestOptions); + channel.lastEtag = lastEtag; + channel.position = position; + channel.isOpen = isOpen; + channel.endOfStream = endOfStream; + channel.chunkSize = chunkSize; + return channel; + } + + @Override + public int hashCode() { + return Objects.hash(serviceOptions, blob, 
requestOptions, lastEtag, position, isOpen, + endOfStream, chunkSize); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof StateImpl)) { + return false; + } + final StateImpl other = (StateImpl) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.blob, other.blob) + && Objects.equals(this.requestOptions, other.requestOptions) + && Objects.equals(this.lastEtag, other.lastEtag) + && this.position == other.position + && this.isOpen == other.isOpen + && this.endOfStream == other.endOfStream + && this.chunkSize == other.chunkSize; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("blob", blob) + .add("position", position) + .add("isOpen", isOpen) + .add("endOfStream", endOfStream) + .toString(); + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java deleted file mode 100644 index 7731d04837a6..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobReadChannelImpl.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.RetryHelper.runWithRetries; - -import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; - -import java.io.IOException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.Callable; - -/** - * Default implementation for BlobReadChannel. 
- */ -class BlobReadChannelImpl implements BlobReadChannel { - - private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; - - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private int position; - private boolean isOpen; - private boolean endOfStream; - private int chunkSize = DEFAULT_CHUNK_SIZE; - - private final StorageRpc storageRpc; - private final StorageObject storageObject; - private int bufferPos; - private byte[] buffer; - - BlobReadChannelImpl(StorageOptions serviceOptions, BlobId blob, - Map requestOptions) { - this.serviceOptions = serviceOptions; - this.blob = blob; - this.requestOptions = requestOptions; - isOpen = true; - storageRpc = serviceOptions.storageRpc(); - storageObject = blob.toPb(); - } - - @Override - public RestorableState save() { - StateImpl.Builder builder = StateImpl.builder(serviceOptions, blob, requestOptions) - .position(position) - .isOpen(isOpen) - .endOfStream(endOfStream) - .chunkSize(chunkSize); - if (buffer != null) { - builder.position(position + bufferPos); - builder.endOfStream(false); - } - return builder.build(); - } - - @Override - public boolean isOpen() { - return isOpen; - } - - @Override - public void close() { - if (isOpen) { - buffer = null; - isOpen = false; - } - } - - private void validateOpen() throws IOException { - if (!isOpen) { - throw new IOException("stream is closed"); - } - } - - @Override - public void seek(int position) throws IOException { - validateOpen(); - this.position = position; - buffer = null; - bufferPos = 0; - endOfStream = false; - } - - @Override - public void chunkSize(int chunkSize) { - this.chunkSize = chunkSize <= 0 ? DEFAULT_CHUNK_SIZE : chunkSize; - } - - @Override - public int read(ByteBuffer byteBuffer) throws IOException { - validateOpen(); - if (buffer == null) { - if (endOfStream) { - return -1; - } - final int toRead = Math.max(byteBuffer.remaining(), chunkSize); - try { - buffer = runWithRetries(new Callable() { - @Override - public byte[] call() { - return storageRpc.read(storageObject, requestOptions, position, toRead); - } - }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - if (toRead > buffer.length) { - endOfStream = true; - if (buffer.length == 0) { - buffer = null; - return -1; - } - } - } - int toWrite = Math.min(buffer.length - bufferPos, byteBuffer.remaining()); - byteBuffer.put(buffer, bufferPos, toWrite); - bufferPos += toWrite; - if (bufferPos >= buffer.length) { - position += buffer.length; - buffer = null; - bufferPos = 0; - } - return toWrite; - } - - static class StateImpl implements RestorableState, Serializable { - - private static final long serialVersionUID = 3889420316004453706L; - - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private final int position; - private final boolean isOpen; - private final boolean endOfStream; - private final int chunkSize; - - StateImpl(Builder builder) { - this.serviceOptions = builder.serviceOptions; - this.blob = builder.blob; - this.requestOptions = builder.requestOptions; - this.position = builder.position; - this.isOpen = builder.isOpen; - this.endOfStream = builder.endOfStream; - this.chunkSize = builder.chunkSize; - } - - static class Builder { - private final StorageOptions serviceOptions; - private final BlobId blob; - private final Map requestOptions; - private int position; - private 
boolean isOpen; - private boolean endOfStream; - private int chunkSize; - - private Builder(StorageOptions options, BlobId blob, Map reqOptions) { - this.serviceOptions = options; - this.blob = blob; - this.requestOptions = reqOptions; - } - - Builder position(int position) { - this.position = position; - return this; - } - - Builder isOpen(boolean isOpen) { - this.isOpen = isOpen; - return this; - } - - Builder endOfStream(boolean endOfStream) { - this.endOfStream = endOfStream; - return this; - } - - Builder chunkSize(int chunkSize) { - this.chunkSize = chunkSize; - return this; - } - - RestorableState build() { - return new StateImpl(this); - } - } - - static Builder builder( - StorageOptions options, BlobId blob, Map reqOptions) { - return new Builder(options, blob, reqOptions); - } - - @Override - public BlobReadChannel restore() { - BlobReadChannelImpl channel = new BlobReadChannelImpl(serviceOptions, blob, requestOptions); - channel.position = position; - channel.isOpen = isOpen; - channel.endOfStream = endOfStream; - channel.chunkSize = chunkSize; - return channel; - } - - @Override - public int hashCode() { - return Objects.hash(serviceOptions, blob, requestOptions, position, isOpen, endOfStream, - chunkSize); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (!(obj instanceof StateImpl)) { - return false; - } - final StateImpl other = (StateImpl) obj; - return Objects.equals(this.serviceOptions, other.serviceOptions) - && Objects.equals(this.blob, other.blob) - && Objects.equals(this.requestOptions, other.requestOptions) - && this.position == other.position - && this.isOpen == other.isOpen - && this.endOfStream == other.endOfStream - && this.chunkSize == other.chunkSize; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("blob", blob) - .add("position", position) - .add("isOpen", isOpen) - .add("endOfStream", endOfStream) - .toString(); - } - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java index be3ef2293ec3..d1d12ec77638 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannel.java @@ -16,33 +16,77 @@ package com.google.gcloud.storage; +import static com.google.gcloud.RetryHelper.runWithRetries; +import static java.util.concurrent.Executors.callable; + +import com.google.gcloud.BaseWriteChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.spi.StorageRpc; -import java.io.Closeable; -import java.nio.channels.WritableByteChannel; +import java.util.Map; /** - * A channel for writing data to a Google Cloud Storage object. - * - * Implementations of this class may further buffer data internally to reduce remote calls. Written - * data will only be visible after calling {@link #close()}. This class is serializable, to allow - * incremental writes. + * Write channel implementation to upload Google Cloud Storage blobs. */ -public interface BlobWriteChannel extends WritableByteChannel, Closeable { - - /** - * Sets the minimum size that will be written by a single RPC. - * Written data will be buffered and only flushed upon reaching this size or closing the channel. 
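From the caller's perspective, the new `BlobWriteChannel` is used through the generic `WriteChannel` interface. A sketch of an upload, not part of the patch, assuming a configured service and the `Storage.writer` factory (placeholder names):

```java
import com.google.gcloud.WriteChannel;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class WriteChannelExample {
  public static void main(String... args) throws IOException {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    BlobInfo blobInfo = BlobInfo.builder("my_bucket", "my_blob")
        .contentType("text/plain")
        .build();
    byte[] content = "hello world".getBytes(StandardCharsets.UTF_8);
    try (WriteChannel writer = storage.writer(blobInfo)) {
      // Bytes are buffered locally and flushed in chunks; the blob is visible after close().
      writer.write(ByteBuffer.wrap(content));
    }
  }
}
```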
- */ - void chunkSize(int chunkSize); - - /** - * Saves the write channel state so that it can be restored afterwards. The original - * {@code BlobWriteChannel} and the restored one should not both be used. Closing one channel - * causes the other channel to close, subsequent writes will fail. - * - * @return a {@link RestorableState} object that contains the write channel state and can restore - * it afterwards. State object must implement {@link java.io.Serializable}. - */ - public RestorableState save(); +class BlobWriteChannel extends BaseWriteChannel { + + BlobWriteChannel(StorageOptions options, BlobInfo blob, Map optionsMap) { + this(options, blob, options.rpc().open(blob.toPb(), optionsMap)); + } + + BlobWriteChannel(StorageOptions options, BlobInfo blobInfo, String uploadId) { + super(options, blobInfo, uploadId); + } + + @Override + protected void flushBuffer(final int length, final boolean last) { + try { + runWithRetries(callable(new Runnable() { + @Override + public void run() { + options().rpc().write(uploadId(), buffer(), 0, position(), length, last); + } + }), options().retryParams(), StorageImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + } + + protected StateImpl.Builder stateBuilder() { + return StateImpl.builder(options(), entity(), uploadId()); + } + + static class StateImpl extends BaseWriteChannel.BaseState { + + private static final long serialVersionUID = -9028324143780151286L; + + StateImpl(Builder builder) { + super(builder); + } + + static class Builder extends BaseWriteChannel.BaseState.Builder { + + private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { + super(options, blobInfo, uploadId); + } + + @Override + public RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { + return new Builder(options, blobInfo, uploadId); + } + + @Override + public WriteChannel restore() { + BlobWriteChannel channel = new BlobWriteChannel(serviceOptions, entity, uploadId); + channel.restore(this); + return channel; + } + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java deleted file mode 100644 index 1c841d1dfc6a..000000000000 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BlobWriteChannelImpl.java +++ /dev/null @@ -1,274 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.google.gcloud.storage; - -import static com.google.gcloud.RetryHelper.runWithRetries; -import static java.util.concurrent.Executors.callable; - -import com.google.api.services.storage.model.StorageObject; -import com.google.common.base.MoreObjects; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryHelper; -import com.google.gcloud.spi.StorageRpc; - -import java.io.IOException; -import java.io.Serializable; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Map; -import java.util.Objects; - -/** - * Default implementation for BlobWriteChannel. - */ -class BlobWriteChannelImpl implements BlobWriteChannel { - - private static final long serialVersionUID = 8675286882724938737L; - private static final int MIN_CHUNK_SIZE = 256 * 1024; - private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; - - private final StorageOptions options; - private final BlobInfo blobInfo; - private final String uploadId; - private int position; - private byte[] buffer = new byte[0]; - private int limit; - private boolean isOpen = true; - private int chunkSize = DEFAULT_CHUNK_SIZE; - - private final StorageRpc storageRpc; - private final StorageObject storageObject; - - BlobWriteChannelImpl(StorageOptions options, BlobInfo blobInfo, - Map optionsMap) { - this.options = options; - this.blobInfo = blobInfo; - storageRpc = options.storageRpc(); - storageObject = blobInfo.toPb(); - uploadId = storageRpc.open(storageObject, optionsMap); - } - - BlobWriteChannelImpl(StorageOptions options, BlobInfo blobInfo, String uploadId) { - this.options = options; - this.blobInfo = blobInfo; - this.uploadId = uploadId; - storageRpc = options.storageRpc(); - storageObject = blobInfo.toPb(); - } - - @Override - public RestorableState save() { - byte[] bufferToSave = null; - if (isOpen) { - flush(); - bufferToSave = Arrays.copyOf(buffer, limit); - } - return StateImpl.builder(options, blobInfo, uploadId) - .position(position) - .buffer(bufferToSave) - .isOpen(isOpen) - .chunkSize(chunkSize) - .build(); - } - - private void flush() { - if (limit >= chunkSize) { - final int length = limit - limit % MIN_CHUNK_SIZE; - try { - runWithRetries(callable(new Runnable() { - @Override - public void run() { - storageRpc.write(uploadId, buffer, 0, storageObject, position, length, false); - } - }), options.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - position += length; - limit -= length; - byte[] temp = new byte[chunkSize]; - System.arraycopy(buffer, length, temp, 0, limit); - buffer = temp; - } - } - - private void validateOpen() throws IOException { - if (!isOpen) { - throw new IOException("stream is closed"); - } - } - - @Override - public int write(ByteBuffer byteBuffer) throws IOException { - validateOpen(); - int toWrite = byteBuffer.remaining(); - int spaceInBuffer = buffer.length - limit; - if (spaceInBuffer >= toWrite) { - byteBuffer.get(buffer, limit, toWrite); - } else { - buffer = Arrays.copyOf(buffer, Math.max(chunkSize, buffer.length + toWrite - spaceInBuffer)); - byteBuffer.get(buffer, limit, toWrite); - } - limit += toWrite; - flush(); - return toWrite; - } - - @Override - public boolean isOpen() { - return isOpen; - } - - @Override - public void close() throws IOException { - if (isOpen) { - try { - runWithRetries(callable(new Runnable() { - @Override - public void run() { - storageRpc.write(uploadId, buffer, 0, storageObject, position, limit, true); - } - 
}), options.retryParams(), StorageImpl.EXCEPTION_HANDLER); - } catch (RetryHelper.RetryHelperException e) { - throw StorageException.translateAndThrow(e); - } - position += buffer.length; - isOpen = false; - buffer = null; - } - } - - @Override - public void chunkSize(int chunkSize) { - chunkSize = (chunkSize / MIN_CHUNK_SIZE) * MIN_CHUNK_SIZE; - this.chunkSize = Math.max(MIN_CHUNK_SIZE, chunkSize); - } - - static class StateImpl implements RestorableState, Serializable { - - private static final long serialVersionUID = 8541062465055125619L; - - private final StorageOptions serviceOptions; - private final BlobInfo blobInfo; - private final String uploadId; - private final int position; - private final byte[] buffer; - private final boolean isOpen; - private final int chunkSize; - - StateImpl(Builder builder) { - this.serviceOptions = builder.serviceOptions; - this.blobInfo = builder.blobInfo; - this.uploadId = builder.uploadId; - this.position = builder.position; - this.buffer = builder.buffer; - this.isOpen = builder.isOpen; - this.chunkSize = builder.chunkSize; - } - - static class Builder { - private final StorageOptions serviceOptions; - private final BlobInfo blobInfo; - private final String uploadId; - private int position; - private byte[] buffer; - private boolean isOpen; - private int chunkSize; - - private Builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - this.serviceOptions = options; - this.blobInfo = blobInfo; - this.uploadId = uploadId; - } - - Builder position(int position) { - this.position = position; - return this; - } - - Builder buffer(byte[] buffer) { - this.buffer = buffer; - return this; - } - - Builder isOpen(boolean isOpen) { - this.isOpen = isOpen; - return this; - } - - Builder chunkSize(int chunkSize) { - this.chunkSize = chunkSize; - return this; - } - - RestorableState build() { - return new StateImpl(this); - } - } - - static Builder builder(StorageOptions options, BlobInfo blobInfo, String uploadId) { - return new Builder(options, blobInfo, uploadId); - } - - @Override - public BlobWriteChannel restore() { - BlobWriteChannelImpl channel = new BlobWriteChannelImpl(serviceOptions, blobInfo, uploadId); - if (buffer != null) { - channel.buffer = buffer.clone(); - channel.limit = buffer.length; - } - channel.position = position; - channel.isOpen = isOpen; - channel.chunkSize = chunkSize; - return channel; - } - - @Override - public int hashCode() { - return Objects.hash(serviceOptions, blobInfo, uploadId, position, isOpen, chunkSize, - Arrays.hashCode(buffer)); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (!(obj instanceof StateImpl)) { - return false; - } - final StateImpl other = (StateImpl) obj; - return Objects.equals(this.serviceOptions, other.serviceOptions) - && Objects.equals(this.blobInfo, other.blobInfo) - && Objects.equals(this.uploadId, other.uploadId) - && Objects.deepEquals(this.buffer, other.buffer) - && this.position == other.position - && this.isOpen == other.isOpen - && this.chunkSize == other.chunkSize; - } - - @Override - public String toString() { - return MoreObjects.toStringHelper(this) - .add("blobInfo", blobInfo) - .add("uploadId", uploadId) - .add("position", position) - .add("isOpen", isOpen) - .toString(); - } - } -} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java index 3193259f68cb..3acd3f5d79b9 100644 --- 
a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java @@ -18,22 +18,35 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.gcloud.storage.Bucket.BucketSourceOption.toGetOptions; +import static com.google.gcloud.storage.Bucket.BucketSourceOption.toSourceOptions; -import com.google.gcloud.storage.Storage.BlobSourceOption; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.Iterators; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BlobTargetOption; -import com.google.gcloud.storage.Storage.BucketSourceOption; +import com.google.gcloud.storage.Storage.BlobWriteOption; import com.google.gcloud.storage.Storage.BucketTargetOption; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectInputStream; +import java.io.Serializable; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Objects; /** * A Google cloud storage bucket. * - * <p>
- * Objects of this class are immutable. Operations that modify the bucket like {@link #update} + * <p>Objects of this class are immutable. Operations that modify the bucket like {@link #update} * return a new object. To get a {@code Bucket} object with the most recent information use * {@link #reload}. * </p>
      @@ -43,6 +56,142 @@ public final class Bucket { private final Storage storage; private final BucketInfo info; + private static class BlobPageFetcher implements PageImpl.NextPageFetcher { + + private static final long serialVersionUID = 3221100177471323801L; + + private final StorageOptions options; + private final Page infoPage; + + BlobPageFetcher(StorageOptions options, Page infoPage) { + this.options = options; + this.infoPage = infoPage; + } + + @Override + public Page nextPage() { + Page nextInfoPage = infoPage.nextPage(); + return new PageImpl<>(new BlobPageFetcher(options, nextInfoPage), + nextInfoPage.nextPageCursor(), new LazyBlobIterable(options, nextInfoPage.values())); + } + } + + private static class LazyBlobIterable implements Iterable, Serializable { + + private static final long serialVersionUID = -3092290247725378832L; + + private final StorageOptions options; + private final Iterable infoIterable; + private transient Storage storage; + + public LazyBlobIterable(StorageOptions options, Iterable infoIterable) { + this.options = options; + this.infoIterable = infoIterable; + this.storage = options.service(); + } + + private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + this.storage = options.service(); + } + + @Override + public Iterator iterator() { + return Iterators.transform(infoIterable.iterator(), new Function() { + @Override + public Blob apply(BlobInfo blobInfo) { + return new Blob(storage, blobInfo); + } + }); + } + + @Override + public int hashCode() { + return Objects.hash(options, infoIterable); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof LazyBlobIterable)) { + return false; + } + LazyBlobIterable other = (LazyBlobIterable) obj; + return Objects.equals(options, other.options) + && Objects.equals(infoIterable, other.infoIterable); + } + } + + /** + * Class for specifying bucket source options when {@code Bucket} methods are used. + */ + public static class BucketSourceOption extends Option { + + private static final long serialVersionUID = 6928872234155522371L; + + private BucketSourceOption(StorageRpc.Option rpcOption) { + super(rpcOption, null); + } + + private Storage.BucketSourceOption toSourceOptions(BucketInfo bucketInfo) { + switch (rpcOption()) { + case IF_METAGENERATION_MATCH: + return Storage.BucketSourceOption.metagenerationMatch(bucketInfo.metageneration()); + case IF_METAGENERATION_NOT_MATCH: + return Storage.BucketSourceOption.metagenerationNotMatch(bucketInfo.metageneration()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + private Storage.BucketGetOption toGetOption(BucketInfo bucketInfo) { + switch (rpcOption()) { + case IF_METAGENERATION_MATCH: + return Storage.BucketGetOption.metagenerationMatch(bucketInfo.metageneration()); + case IF_METAGENERATION_NOT_MATCH: + return Storage.BucketGetOption.metagenerationNotMatch(bucketInfo.metageneration()); + default: + throw new AssertionError("Unexpected enum value"); + } + } + + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ + public static BucketSourceOption metagenerationMatch() { + return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH); + } + + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if metageneration matches. 
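The new `Bucket.BucketSourceOption` lets `Bucket` methods attach preconditions derived from the bucket's own metadata. A sketch, not part of the patch (placeholder project and bucket names; a configured service is assumed):

```java
import com.google.gcloud.storage.Bucket;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class BucketOptionExample {
  public static void main(String... args) {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    Bucket bucket = Bucket.get(storage, "my_bucket");
    if (bucket != null) {
      // Delete only if the bucket metadata is unchanged since `bucket` was fetched.
      boolean deleted = bucket.delete(Bucket.BucketSourceOption.metagenerationMatch());
      System.out.println(deleted ? "deleted" : "not found");
    }
  }
}
```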
+ */ + public static BucketSourceOption metagenerationNotMatch() { + return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); + } + + static Storage.BucketSourceOption[] toSourceOptions(BucketInfo bucketInfo, + BucketSourceOption... options) { + Storage.BucketSourceOption[] convertedOptions = + new Storage.BucketSourceOption[options.length]; + int index = 0; + for (BucketSourceOption option : options) { + convertedOptions[index++] = option.toSourceOptions(bucketInfo); + } + return convertedOptions; + } + + static Storage.BucketGetOption[] toGetOptions(BucketInfo bucketInfo, + BucketSourceOption... options) { + Storage.BucketGetOption[] convertedOptions = new Storage.BucketGetOption[options.length]; + int index = 0; + for (BucketSourceOption option : options) { + convertedOptions[index++] = option.toGetOption(bucketInfo); + } + return convertedOptions; + } + } + /** * Constructs a {@code Bucket} object for the provided {@code BucketInfo}. The storage service is * used to issue requests. @@ -61,11 +210,12 @@ public Bucket(Storage storage, BucketInfo info) { * * @param storage the storage service used for issuing requests * @param bucket bucket's name - * @return the {@code Bucket} object or {@code null} if not found. + * @param options blob get options + * @return the {@code Bucket} object or {@code null} if not found * @throws StorageException upon failure */ - public static Bucket load(Storage storage, String bucket) { - BucketInfo info = storage.get(bucket); + public static Bucket get(Storage storage, String bucket, Storage.BucketGetOption... options) { + BucketInfo info = storage.get(bucket, options); return info != null ? new Bucket(storage, info) : null; } @@ -82,19 +232,22 @@ public BucketInfo info() { * @return true if this bucket exists, false otherwise * @throws StorageException upon failure */ - public boolean exists() { - return storage.get(info.name()) != null; + public boolean exists(BucketSourceOption... options) { + int length = options.length; + Storage.BucketGetOption[] getOptions = Arrays.copyOf(toGetOptions(info, options), length + 1); + getOptions[length] = Storage.BucketGetOption.fields(); + return storage.get(info.name(), getOptions) != null; } /** - * Fetches current bucket's latest information. + * Fetches current bucket's latest information. Returns {@code null} if the bucket does not exist. * * @param options bucket read options - * @return a {@code Bucket} object with latest information + * @return a {@code Bucket} object with latest information or {@code null} if not found * @throws StorageException upon failure */ public Bucket reload(BucketSourceOption... options) { - return new Bucket(storage, storage.get(info.name(), options)); + return Bucket.get(storage, info.name(), toGetOptions(info, options)); } /** @@ -118,11 +271,11 @@ public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { * Deletes this bucket. * * @param options bucket delete options - * @return true if bucket was deleted + * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ public boolean delete(BucketSourceOption... options) { - return storage.delete(info.name(), options); + return storage.delete(info.name(), toSourceOptions(info, options)); } /** @@ -131,8 +284,11 @@ public boolean delete(BucketSourceOption... options) { * @param options options for listing blobs * @throws StorageException upon failure */ - public ListResult list(Storage.BlobListOption... 
options) { - return new BlobListResult(storage, storage.list(info.name(), options)); + public Page list(Storage.BlobListOption... options) { + Page infoPage = storage.list(info.name(), options); + StorageOptions storageOptions = storage.options(); + return new PageImpl<>(new BlobPageFetcher(storageOptions, infoPage), infoPage.nextPageCursor(), + new LazyBlobIterable(storageOptions, infoPage.values())); } /** @@ -142,7 +298,7 @@ public ListResult list(Storage.BlobListOption... options) { * @param options blob search options * @throws StorageException upon failure */ - public Blob get(String blob, BlobSourceOption... options) { + public Blob get(String blob, BlobGetOption... options) { return new Blob(storage, storage.get(BlobId.of(info.name(), blob), options)); } @@ -152,7 +308,7 @@ public Blob get(String blob, BlobSourceOption... options) { * @param blobName1 first blob to get * @param blobName2 second blob to get * @param blobNames other blobs to get - * @return an immutable list of {@code Blob} objects. + * @return an immutable list of {@code Blob} objects * @throws StorageException upon failure */ public List get(String blobName1, String blobName2, String... blobNames) { @@ -163,7 +319,7 @@ public List get(String blobName1, String blobName2, String... blobNames) { batch.get(info.name(), name); } List blobs = new ArrayList<>(blobNames.length); - BatchResponse response = storage.apply(batch.build()); + BatchResponse response = storage.submit(batch.build()); for (BatchResponse.Result result : response.gets()) { BlobInfo blobInfo = result.get(); blobs.add(blobInfo != null ? new Blob(storage, blobInfo) : null); @@ -172,17 +328,43 @@ public List get(String blobName1, String blobName2, String... blobNames) { } /** - * Creates a new blob in this bucket. + * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. + * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} + * is recommended as it uses resumable upload. MD5 and CRC32C hashes of {@code content} are + * computed and used for validating transferred data. * * @param blob a blob name * @param content the blob content + * @param contentType the blob content type. If {@code null} then + * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. + * @param options options for blob creation + * @return the complete blob information + * @throws StorageException upon failure + */ + public Blob create(String blob, byte[] content, String contentType, BlobTargetOption... options) { + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(info.name(), blob)) + .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); + return new Blob(storage, storage.create(blobInfo, content, options)); + } + + /** + * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. + * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} + * is recommended as it uses resumable upload. + * + * @param blob a blob name + * @param content the blob content as a stream + * @param contentType the blob content type. If {@code null} then + * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation - * @return a complete blob information. + * @return the complete blob information * @throws StorageException upon failure */ - Blob create(String blob, byte[] content, BlobTargetOption...
options) { - BlobId blobId = BlobId.of(info.name(), blob); - return new Blob(storage, storage.create(BlobInfo.builder(blobId).build(), content, options)); + public Blob create(String blob, InputStream content, String contentType, + BlobWriteOption... options) { + BlobInfo blobInfo = BlobInfo.builder(BlobId.of(info.name(), blob)) + .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); + return new Blob(storage, storage.create(blobInfo, content, options)); } /** diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java index ddd4665ca129..62fbf9c6521f 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/BucketInfo.java @@ -33,7 +33,6 @@ import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; import com.google.gcloud.storage.Acl.Entity; import java.io.IOException; @@ -80,10 +79,16 @@ public com.google.api.services.storage.model.Bucket apply(BucketInfo bucketInfo) private final List cors; private final List acl; private final List defaultAcl; - private final Location location; - private final StorageClass storageClass; + private final String location; + private final String storageClass; - public static abstract class DeleteRule implements Serializable { + /** + * Base class for bucket's delete rules. Allows configuring automatic deletion of blobs and blob + * versions. + * + * @see Object Lifecycle Management + */ + public abstract static class DeleteRule implements Serializable { private static final long serialVersionUID = 3137971668395933033L; private static final String SUPPORTED_ACTION = "Delete"; @@ -153,11 +158,23 @@ static DeleteRule fromPb(Rule rule) { } } + /** + * Delete rule class that sets a Time To Live for blobs in the bucket. + * + * @see Object Lifecycle Management + */ public static class AgeDeleteRule extends DeleteRule { private static final long serialVersionUID = 5697166940712116380L; private final int daysToLive; + /** + * Creates an {@code AgeDeleteRule} object. + * + * @param daysToLive blobs' Time To Live expressed in days. The time when the age condition is + * considered to be satisfied is computed by adding {@code daysToLive} days to the + * midnight following blob's creation time in UTC. + */ public AgeDeleteRule(int daysToLive) { super(Type.AGE); this.daysToLive = daysToLive; @@ -200,16 +217,28 @@ private void readObject(ObjectInputStream in) throws IOException, rule = new JacksonFactory().fromString(in.readUTF(), Rule.class); } + @Override Rule toPb() { return rule; } } + /** + * Delete rule class for blobs in the bucket that have been created before a certain date. + * + * @see Object Lifecycle Management + */ public static class CreatedBeforeDeleteRule extends DeleteRule { private static final long serialVersionUID = 881692650279195867L; private final long timeMillis; + /** + * Creates a {@code CreatedBeforeDeleteRule} object. + * + * @param timeMillis a date in UTC.
Blobs that have been created before midnight of the provided + * date meet the delete condition + */ public CreatedBeforeDeleteRule(long timeMillis) { super(Type.CREATE_BEFORE); this.timeMillis = timeMillis; @@ -225,11 +254,23 @@ void populateCondition(Rule.Condition condition) { } } + /** + * Delete rule class for versioned blobs. Specifies when to delete a blob's version according to + * the number of available newer versions for that blob. + * + * @see Object Lifecycle Management + */ public static class NumNewerVersionsDeleteRule extends DeleteRule { private static final long serialVersionUID = -1955554976528303894L; private final int numNewerVersions; + /** + * Creates a {@code NumNewerVersionsDeleteRule} object. + * + * @param numNewerVersions the number of newer versions. A blob's version meets the delete + * condition when {@code numNewerVersions} newer versions are available. + */ public NumNewerVersionsDeleteRule(int numNewerVersions) { super(Type.NUM_NEWER_VERSIONS); this.numNewerVersions = numNewerVersions; @@ -245,11 +286,22 @@ void populateCondition(Rule.Condition condition) { } } + /** + * Delete rule class to distinguish between live and archived blobs. + * + * @see Object Lifecycle Management + */ public static class IsLiveDeleteRule extends DeleteRule { private static final long serialVersionUID = -3502994563121313364L; private final boolean isLive; + /** + * Creates an {@code IsLiveDeleteRule} object. + * + * @param isLive if set to {@code true} live blobs meet the delete condition. If set to + * {@code false} the delete condition is met by archived blobs. + */ public IsLiveDeleteRule(boolean isLive) { super(Type.IS_LIVE); this.isLive = isLive; @@ -265,135 +317,7 @@ void populateCondition(Rule.Condition condition) { } } - public static final class StorageClass implements Serializable { - - private static final long serialVersionUID = 374002156285326563L; - private static final ImmutableMap STRING_TO_OPTION; - private static final StorageClass NULL_VALUE = - new StorageClass(Data.nullOf(String.class)); - - private final String value; - - public enum Option { - DURABLE_REDUCED_AVAILABILITY, STANDARD; - - private final StorageClass storageClass; - - Option() { - storageClass = new StorageClass(name()); - } - } - - private StorageClass(String value) { - this.value = checkNotNull(value); - } - - @Override - public String toString() { - return value(); - } - - public String value() { - return value; - } - - public static StorageClass standard() { - return Option.STANDARD.storageClass; - } - - public static StorageClass durableReducedAvailability() { - return Option.DURABLE_REDUCED_AVAILABILITY.storageClass; - } - - public static StorageClass of(String value) { - Option option = STRING_TO_OPTION.get(value.toUpperCase()); - return option == null ?
new StorageClass(value) : option.storageClass; - } - - static { - ImmutableMap.Builder map = ImmutableMap.builder(); - for (Option option : Option.values()) { - map.put(option.name(), option); - } - STRING_TO_OPTION = map.build(); - } - } - - public static final class Location implements Serializable { - - private static final long serialVersionUID = 9073107666838637662L; - private static final ImmutableMap STRING_TO_OPTION; - private static final Location NULL_VALUE = new Location(Data.nullOf(String.class)); - - private final String value; - - public enum Option { - US, EU, ASIA; - - private final Location location; - - Option() { - location = new Location(name()); - } - } - - private Location(String value) { - this.value = checkNotNull(value); - } - - @Override - public int hashCode() { - return Objects.hash(value); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - final Location other = (Location) obj; - return Objects.equals(this.value, other.value); - } - - @Override - public String toString() { - return value(); - } - - public String value() { - return value; - } - - public static Location us() { - return Option.US.location; - } - - public static Location eu() { - return Option.EU.location; - } - - public static Location asia() { - return Option.ASIA.location; - } - - public static Location of(String value) { - Option option = STRING_TO_OPTION.get(value.toUpperCase()); - return option == null ? new Location(value) : option.location; - } - - static { - ImmutableMap.Builder map = ImmutableMap.builder(); - for (Option option : Option.values()) { - map.put(option.name(), option); - } - STRING_TO_OPTION = map.build(); - } - } - - public final static class Builder { + public static final class Builder { private String id; private String name; @@ -402,18 +326,40 @@ public final static class Builder { private Boolean versioningEnabled; private String indexPage; private String notFoundPage; - private ImmutableList deleteRules; - private StorageClass storageClass; - private Location location; + private List deleteRules; + private String storageClass; + private String location; private String etag; private Long createTime; private Long metageneration; - private ImmutableList cors; - private ImmutableList acl; - private ImmutableList defaultAcl; + private List cors; + private List acl; + private List defaultAcl; private Builder() {} + private Builder(BucketInfo bucketInfo) { + id = bucketInfo.id; + name = bucketInfo.name; + etag = bucketInfo.etag; + createTime = bucketInfo.createTime; + metageneration = bucketInfo.metageneration; + location = bucketInfo.location; + storageClass = bucketInfo.storageClass; + cors = bucketInfo.cors; + acl = bucketInfo.acl; + defaultAcl = bucketInfo.defaultAcl; + owner = bucketInfo.owner; + selfLink = bucketInfo.selfLink; + versioningEnabled = bucketInfo.versioningEnabled; + indexPage = bucketInfo.indexPage; + notFoundPage = bucketInfo.notFoundPage; + deleteRules = bucketInfo.deleteRules; + } + + /** + * Sets the bucket's name. + */ public Builder name(String name) { this.name = checkNotNull(name); return this; @@ -434,33 +380,59 @@ Builder selfLink(String selfLink) { return this; } + /** + * Sets whether versioning should be enabled for this bucket. When set to true, versioning is + * fully enabled. 
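With the `StorageClass` and `Location` wrapper classes removed, location and storage class are now set as plain strings on the builder. A sketch, not part of the patch (placeholder bucket name; `ImmutableList` is Guava, already a project dependency):

```java
import com.google.common.collect.ImmutableList;
import com.google.gcloud.storage.BucketInfo;

public class BucketInfoExample {
  public static void main(String... args) {
    BucketInfo info = BucketInfo.builder("my_bucket")
        .location("US")           // plain string replaces the old Location class
        .storageClass("STANDARD") // plain string replaces the old StorageClass class
        .versioningEnabled(true)
        .deleteRules(ImmutableList.of(new BucketInfo.AgeDeleteRule(30)))
        .build();
    System.out.println(info.location() + " / " + info.storageClass());
  }
}
```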
+ */ public Builder versioningEnabled(Boolean enable) { this.versioningEnabled = firstNonNull(enable, Data.nullOf(Boolean.class)); return this; } + /** + * Sets the bucket's website index page. Behaves as the bucket's directory index where missing + * blobs are treated as potential directories. + */ public Builder indexPage(String indexPage) { this.indexPage = indexPage; return this; } + /** + * Sets the custom object to return when a requested resource is not found. + */ public Builder notFoundPage(String notFoundPage) { this.notFoundPage = notFoundPage; return this; } + /** + * Sets the bucket's lifecycle configuration as a number of delete rules. + * + * @see Lifecycle Management + */ public Builder deleteRules(Iterable rules) { this.deleteRules = rules != null ? ImmutableList.copyOf(rules) : null; return this; } - public Builder storageClass(StorageClass storageClass) { - this.storageClass = firstNonNull(storageClass, StorageClass.NULL_VALUE); + /** + * Sets the bucket's storage class. This defines how blobs in the bucket are stored and + * determines the SLA and the cost of storage. A list of supported values is available + * here. + */ + public Builder storageClass(String storageClass) { + this.storageClass = storageClass; return this; } - public Builder location(Location location) { - this.location = firstNonNull(location, Location.NULL_VALUE); + /** + * Sets the bucket's location. Data for blobs in the bucket resides in physical storage within + * this region. A list of supported values is available + * here. + */ + public Builder location(String location) { + this.location = location; return this; } @@ -479,21 +451,43 @@ Builder metageneration(Long metageneration) { return this; } + /** + * Sets the bucket's Cross-Origin Resource Sharing (CORS) configuration. + * + * @see + * Cross-Origin Resource Sharing (CORS) + */ public Builder cors(Iterable cors) { this.cors = cors != null ? ImmutableList.copyOf(cors) : null; return this; } + /** + * Sets the bucket's access control configuration. + * + * @see + * About Access Control Lists + */ public Builder acl(Iterable acl) { this.acl = acl != null ? ImmutableList.copyOf(acl) : null; return this; } + /** + * Sets the default access control configuration to apply to bucket's blobs when no other + * configuration is specified. + * + * @see + * About Access Control Lists + */ public Builder defaultAcl(Iterable acl) { this.defaultAcl = acl != null ? ImmutableList.copyOf(acl) : null; return this; } + /** + * Creates a {@code BucketInfo} object. + */ public BucketInfo build() { checkNotNull(name); return new BucketInfo(this); @@ -519,88 +513,143 @@ private BucketInfo(Builder builder) { deleteRules = builder.deleteRules; } + /** + * Returns the bucket's id. + */ public String id() { return id; } + /** + * Returns the bucket's name. + */ public String name() { return name; } + /** + * Returns the bucket's owner. This is always the project team's owner group. + */ public Entity owner() { return owner; } + /** + * Returns the URI of this bucket as a string. + */ public String selfLink() { return selfLink; } + /** + * Returns {@code true} if versioning is fully enabled for this bucket, {@code false} otherwise. + */ public Boolean versioningEnabled() { return Data.isNull(versioningEnabled) ? null : versioningEnabled; } + /** + * Returns bucket's website index page. Behaves as the bucket's directory index where missing + * blobs are treated as potential directories. 
+ */ public String indexPage() { return indexPage; } + /** + * Returns the custom object to return when a requested resource is not found. + */ public String notFoundPage() { return notFoundPage; } + /** + * Returns bucket's lifecycle configuration as a number of delete rules. + * + * @see Lifecycle Management + */ public List deleteRules() { return deleteRules; } + /** + * Returns HTTP 1.1 Entity tag for the bucket. + * + * @see Entity Tags + */ public String etag() { return etag; } + /** + * Returns the time at which the bucket was created. + */ public Long createTime() { return createTime; } + /** + * Returns the metadata generation of this bucket. + */ public Long metageneration() { return metageneration; } - public Location location() { - return location == null || Data.isNull(location.value) ? null : location; - } - - public StorageClass storageClass() { - return storageClass == null || Data.isNull(storageClass.value) ? null : storageClass; - } - + /** + * Returns the bucket's location. Data for blobs in the bucket resides in physical storage within + * this region. + * + * @see Bucket Locations + */ + public String location() { + return location; + } + + /** + * Returns the bucket's storage class. This defines how blobs in the bucket are stored and + * determines the SLA and the cost of storage. + * + * @see Storage Classes + */ + public String storageClass() { + return storageClass; + } + + /** + * Returns the bucket's Cross-Origin Resource Sharing (CORS) configuration. + * + * @see + * Cross-Origin Resource Sharing (CORS) + */ public List cors() { return cors; } + /** + * Returns the bucket's access control configuration. + * + * @see + * About Access Control Lists + */ public List acl() { return acl; } + /** + * Returns the default access control configuration for this bucket's blobs. + * + * @see + * About Access Control Lists + */ public List defaultAcl() { return defaultAcl; } + /** + * Returns a builder for the current bucket. + */ public Builder toBuilder() { - return new Builder() - .name(name) - .id(id) - .createTime(createTime) - .etag(etag) - .metageneration(metageneration) - .cors(cors) - .acl(acl) - .defaultAcl(defaultAcl) - .location(location) - .storageClass(storageClass) - .owner(owner) - .selfLink(selfLink) - .versioningEnabled(versioningEnabled) - .indexPage(indexPage) - .notFoundPage(notFoundPage) - .deleteRules(deleteRules); + return new Builder(this); } @Override @@ -633,10 +682,10 @@ com.google.api.services.storage.model.Bucket toPb() { bucketPb.setMetageneration(metageneration); } if (location != null) { - bucketPb.setLocation(location.value()); + bucketPb.setLocation(location); } if (storageClass != null) { - bucketPb.setStorageClass(storageClass.value()); + bucketPb.setStorageClass(storageClass); } if (cors != null) { bucketPb.setCors(transform(cors, Cors.TO_PB_FUNCTION)); @@ -683,10 +732,16 @@ public Rule apply(DeleteRule deleteRule) { return bucketPb; } + /** + * Creates a {@code BucketInfo} object for the provided bucket name. + */ public static BucketInfo of(String name) { return builder(name).build(); } + /** + * Returns a {@code BucketInfo} builder where the bucket's name is set to the provided name. 
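+   *
+   * <p>Example usage (an illustrative sketch; names and values are made up):
+   * <pre>    {@code BucketInfo info = BucketInfo.builder("my-bucket")
+   *    .versioningEnabled(true)
+   *    .deleteRules(ImmutableList.of(new NumNewerVersionsDeleteRule(3)))
+   *    .build();}
+   * </pre>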
+ */ public static Builder builder(String name) { return new Builder().name(name); } @@ -709,10 +764,10 @@ static BucketInfo fromPb(com.google.api.services.storage.model.Bucket bucketPb) builder.createTime(bucketPb.getTimeCreated().getValue()); } if (bucketPb.getLocation() != null) { - builder.location(Location.of(bucketPb.getLocation())); + builder.location(bucketPb.getLocation()); } if (bucketPb.getStorageClass() != null) { - builder.storageClass(StorageClass.of(bucketPb.getStorageClass())); + builder.storageClass(bucketPb.getStorageClass()); } if (bucketPb.getCors() != null) { builder.cors(transform(bucketPb.getCors(), Cors.FROM_PB_FUNCTION)); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java new file mode 100644 index 000000000000..1e5427a847d4 --- /dev/null +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/CopyWriter.java @@ -0,0 +1,276 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.storage; + +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.common.base.MoreObjects; +import com.google.gcloud.Restorable; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.RewriteRequest; +import com.google.gcloud.spi.StorageRpc.RewriteResponse; +
+import java.io.Serializable; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.Callable; + +/** + * Google Storage blob copy writer. This class holds the result of a copy request. If source and + * destination blobs share the same location and storage class, the copy is completed in one RPC + * call; otherwise, one or more {@link #copyChunk} calls are necessary to complete the copy. In + * addition, {@link CopyWriter#result()} can be used to automatically complete the copy and return + * information on the newly created blob. + * + * @see Rewrite + */ +public class CopyWriter implements Restorable { + + private final StorageOptions serviceOptions; + private final StorageRpc storageRpc; + private RewriteResponse rewriteResponse; + + CopyWriter(StorageOptions serviceOptions, RewriteResponse rewriteResponse) { + this.serviceOptions = serviceOptions; + this.rewriteResponse = rewriteResponse; + this.storageRpc = serviceOptions.rpc(); + } + + /** + * Returns the updated information for the written blob. Calling this method when {@code isDone()} + * is {@code false} will block until all pending chunks are copied. + * + *

This method has the same effect as calling: + *

          {@code while (!copyWriter.isDone()) {
      +   *        copyWriter.copyChunk();
      +   *    }}
      +   * 
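+   *
+   * <p>Since {@code CopyWriter} implements {@code Restorable}, an in-progress copy can also be
+   * captured and resumed later (an illustrative sketch; persisting the state is up to the
+   * caller):
+   * <pre>    {@code RestorableState<CopyWriter> state = copyWriter.capture();
+   *    // ... persist the state, then later ...
+   *    BlobInfo blob = state.restore().result();}
+   * </pre>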
      + * + * @throws StorageException upon failure + */ + public BlobInfo result() { + while (!isDone()) { + copyChunk(); + } + return BlobInfo.fromPb(rewriteResponse.result); + } + + /** + * Returns the size of the blob being copied. + */ + public long blobSize() { + return rewriteResponse.blobSize; + } + + /** + * Returns {@code true} if blob copy has finished, {@code false} otherwise. + */ + public boolean isDone() { + return rewriteResponse.isDone; + } + + /** + * Returns the number of bytes copied. + */ + public long totalBytesCopied() { + return rewriteResponse.totalBytesRewritten; + } + + /** + * Copies the next chunk of the blob. An RPC is issued only if copy has not finished yet + * ({@link #isDone} returns {@code false}). + * + * @throws StorageException upon failure + */ + public void copyChunk() { + if (!isDone()) { + try { + this.rewriteResponse = runWithRetries(new Callable() { + @Override + public RewriteResponse call() { + return storageRpc.continueRewrite(rewriteResponse); + } + }, serviceOptions.retryParams(), StorageImpl.EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw StorageException.translateAndThrow(e); + } + } + } + + @Override + public RestorableState capture() { + return StateImpl.builder( + serviceOptions, + BlobId.fromPb(rewriteResponse.rewriteRequest.source), + rewriteResponse.rewriteRequest.sourceOptions, + BlobInfo.fromPb(rewriteResponse.rewriteRequest.target), + rewriteResponse.rewriteRequest.targetOptions) + .blobSize(blobSize()) + .isDone(isDone()) + .megabytesCopiedPerChunk(rewriteResponse.rewriteRequest.megabytesRewrittenPerCall) + .rewriteToken(rewriteResponse.rewriteToken) + .totalBytesRewritten(totalBytesCopied()) + .build(); + } + + static class StateImpl implements RestorableState, Serializable { + + private static final long serialVersionUID = 8279287678903181701L; + + private final StorageOptions serviceOptions; + private final BlobId source; + private final Map sourceOptions; + private final BlobInfo target; + private final Map targetOptions; + private final BlobInfo result; + private final long blobSize; + private final boolean isDone; + private final String rewriteToken; + private final long totalBytesCopied; + private final Long megabytesCopiedPerChunk; + + StateImpl(Builder builder) { + this.serviceOptions = builder.serviceOptions; + this.source = builder.source; + this.sourceOptions = builder.sourceOptions; + this.target = builder.target; + this.targetOptions = builder.targetOptions; + this.result = builder.result; + this.blobSize = builder.blobSize; + this.isDone = builder.isDone; + this.rewriteToken = builder.rewriteToken; + this.totalBytesCopied = builder.totalBytesCopied; + this.megabytesCopiedPerChunk = builder.megabytesCopiedPerChunk; + } + + static class Builder { + + private final StorageOptions serviceOptions; + private final BlobId source; + private final Map sourceOptions; + private final BlobInfo target; + private final Map targetOptions; + private BlobInfo result; + private long blobSize; + private boolean isDone; + private String rewriteToken; + private long totalBytesCopied; + private Long megabytesCopiedPerChunk; + + private Builder(StorageOptions options, BlobId source, + Map sourceOptions, + BlobInfo target, Map targetOptions) { + this.serviceOptions = options; + this.source = source; + this.sourceOptions = sourceOptions; + this.target = target; + this.targetOptions = targetOptions; + } + + Builder result(BlobInfo result) { + this.result = result; + return this; + } + + Builder blobSize(long 
blobSize) { + this.blobSize = blobSize; + return this; + } + + Builder isDone(boolean isDone) { + this.isDone = isDone; + return this; + } + + Builder rewriteToken(String rewriteToken) { + this.rewriteToken = rewriteToken; + return this; + } + + Builder totalBytesRewritten(long totalBytesRewritten) { + this.totalBytesCopied = totalBytesRewritten; + return this; + } + + Builder megabytesCopiedPerChunk(Long megabytesCopiedPerChunk) { + this.megabytesCopiedPerChunk = megabytesCopiedPerChunk; + return this; + } + + RestorableState build() { + return new StateImpl(this); + } + } + + static Builder builder(StorageOptions options, BlobId source, + Map sourceOptions, BlobInfo target, + Map targetOptions) { + return new Builder(options, source, sourceOptions, target, targetOptions); + } + + @Override + public CopyWriter restore() { + RewriteRequest rewriteRequest = new RewriteRequest( + source.toPb(), sourceOptions, target.toPb(), targetOptions, megabytesCopiedPerChunk); + RewriteResponse rewriteResponse = new RewriteResponse(rewriteRequest, + result != null ? result.toPb() : null, blobSize, isDone, rewriteToken, + totalBytesCopied); + return new CopyWriter(serviceOptions, rewriteResponse); + } + + @Override + public int hashCode() { + return Objects.hash(serviceOptions, source, sourceOptions, target, targetOptions, result, + blobSize, isDone, megabytesCopiedPerChunk, rewriteToken, totalBytesCopied); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (!(obj instanceof StateImpl)) { + return false; + } + final StateImpl other = (StateImpl) obj; + return Objects.equals(this.serviceOptions, other.serviceOptions) + && Objects.equals(this.source, other.source) + && Objects.equals(this.sourceOptions, other.sourceOptions) + && Objects.equals(this.target, other.target) + && Objects.equals(this.targetOptions, other.targetOptions) + && Objects.equals(this.result, other.result) + && Objects.equals(this.rewriteToken, other.rewriteToken) + && Objects.equals(this.megabytesCopiedPerChunk, other.megabytesCopiedPerChunk) + && this.blobSize == other.blobSize + && this.isDone == other.isDone + && this.totalBytesCopied == other.totalBytesCopied; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("source", source) + .add("target", target) + .add("isDone", isDone) + .add("totalBytesRewritten", totalBytesCopied) + .add("blobSize", blobSize) + .toString(); + } + } +} diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java index ce8cfb95b6e9..bcbbd1030dbc 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Cors.java @@ -33,6 +33,9 @@ /** * Cross-Origin Resource Sharing (CORS) configuration for a bucket. + * + * @see + * Cross-Origin Resource Sharing (CORS) */ public final class Cors implements Serializable { @@ -57,6 +60,9 @@ public Bucket.Cors apply(Cors cors) { private final ImmutableList origins; private final ImmutableList responseHeaders; + /** + * Class for a CORS origin. + */ public static final class Origin implements Serializable { private static final long serialVersionUID = -4447958124895577993L; @@ -69,10 +75,16 @@ private Origin(String value) { this.value = checkNotNull(value); } + /** + * Returns an {@code Origin} object for all possible origins. 
+ */ public static Origin any() { return ANY; } + /** + * Returns an {@code Origin} object for the given scheme, host and port. + */ public static Origin of(String scheme, String host, int port) { try { return of(new URI(scheme, null, host, port, null, null, null).toString()); @@ -81,6 +93,9 @@ public static Origin of(String scheme, String host, int port) { } } + /** + * Creates an {@code Origin} object for the provided value. + */ public static Origin of(String value) { if (ANY_URI.equals(value)) { return any(); @@ -98,7 +113,7 @@ public boolean equals(Object obj) { if (!(obj instanceof Origin)) { return false; } - return value.equals(((Origin)obj).value); + return value.equals(((Origin) obj).value); } @Override @@ -111,6 +126,9 @@ public String value() { } } + /** + * CORS configuration builder. + */ public static final class Builder { private Integer maxAgeSeconds; @@ -120,26 +138,42 @@ public static final class Builder { private Builder() {} + /** + * Sets the max time in seconds in which a client can issue requests before sending a new + * preflight request. + */ public Builder maxAgeSeconds(Integer maxAgeSeconds) { this.maxAgeSeconds = maxAgeSeconds; return this; } + /** + * Sets the HTTP methods supported by this CORS configuration. + */ public Builder methods(Iterable methods) { this.methods = methods != null ? ImmutableList.copyOf(methods) : null; return this; } + /** + * Sets the origins for this CORS configuration. + */ public Builder origins(Iterable origins) { this.origins = origins != null ? ImmutableList.copyOf(origins) : null; return this; } + /** + * Sets the response headers supported by this CORS configuration. + */ public Builder responseHeaders(Iterable headers) { this.responseHeaders = headers != null ? ImmutableList.copyOf(headers) : null; return this; } + /** + * Creates a CORS configuration. + */ public Cors build() { return new Cors(this); } @@ -152,22 +186,38 @@ private Cors(Builder builder) { this.responseHeaders = builder.responseHeaders; } + /** + * Returns the max time in seconds in which a client can issue requests before sending a new + * preflight request. + */ public Integer maxAgeSeconds() { return maxAgeSeconds; } + /** + * Returns the HTTP methods supported by this CORS configuration. + */ public List methods() { return methods; } + /** + * Returns the origins in this CORS configuration. + */ public List origins() { return origins; } + /** + * Returns the response headers supported by this CORS configuration. + */ public List responseHeaders() { return responseHeaders; } + /** + * Returns a builder for this CORS configuration. + */ public Builder toBuilder() { return builder() .maxAgeSeconds(maxAgeSeconds) @@ -193,6 +243,9 @@ public boolean equals(Object obj) { && Objects.equals(responseHeaders, other.responseHeaders); } + /** + * Returns a CORS configuration builder. + */ public static Builder builder() { return new Builder(); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java index 798db688c8ec..2ec8426bfa9f 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Option.java @@ -25,7 +25,7 @@ import java.util.Objects; /** - * Base class for Storage operation option + * Base class for Storage operation option. 
*/ class Option implements Serializable { diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index f9a1c00d4bec..b550015d0516 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -19,11 +19,16 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import com.google.common.collect.Sets; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; +import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.Service; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.spi.StorageRpc.Tuple; @@ -32,6 +37,7 @@ import java.net.URL; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -46,6 +52,8 @@ */ public interface Storage extends Service { + String DEFAULT_CONTENT_TYPE = "application/octet-stream"; + enum PredefinedAcl { AUTHENTICATED_READ("authenticatedRead"), ALL_AUTHENTICATED_USERS("allAuthenticatedUsers"), @@ -67,6 +75,92 @@ String entry() { } } + enum BucketField { + ID("id"), + SELF_LINK("selfLink"), + NAME("name"), + TIME_CREATED("timeCreated"), + METAGENERATION("metageneration"), + ACL("acl"), + DEFAULT_OBJECT_ACL("defaultObjectAcl"), + OWNER("owner"), + LOCATION("location"), + WEBSITE("website"), + VERSIONING("versioning"), + CORS("cors"), + STORAGE_CLASS("storageClass"), + ETAG("etag"); + + private final String selector; + + BucketField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(BucketField... fields) { + HashSet fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1); + fieldStrings.add(NAME.selector()); + for (BucketField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + enum BlobField { + ACL("acl"), + BUCKET("bucket"), + CACHE_CONTROL("cacheControl"), + COMPONENT_COUNT("componentCount"), + CONTENT_DISPOSITION("contentDisposition"), + CONTENT_ENCODING("contentEncoding"), + CONTENT_LANGUAGE("contentLanguage"), + CONTENT_TYPE("contentType"), + CRC32C("crc32c"), + ETAG("etag"), + GENERATION("generation"), + ID("id"), + KIND("kind"), + MD5HASH("md5Hash"), + MEDIA_LINK("mediaLink"), + METADATA("metadata"), + METAGENERATION("metageneration"), + NAME("name"), + OWNER("owner"), + SELF_LINK("selfLink"), + SIZE("size"), + STORAGE_CLASS("storageClass"), + TIME_DELETED("timeDeleted"), + UPDATED("updated"); + + private final String selector; + + BlobField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(BlobField... fields) { + HashSet fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2); + fieldStrings.add(BUCKET.selector()); + fieldStrings.add(NAME.selector()); + for (BlobField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Class for specifying bucket target options. 
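+   *
+   * <p>Example usage (an illustrative sketch; {@code service} stands for an initialized
+   * {@code Storage} instance):
+   * <pre>    {@code service.create(BucketInfo.of("my-bucket"),
+   *    BucketTargetOption.predefinedAcl(PredefinedAcl.AUTHENTICATED_READ));}
+   * </pre>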
+ */ class BucketTargetOption extends Option { private static final long serialVersionUID = -5880204616982900975L; @@ -79,23 +173,40 @@ private BucketTargetOption(StorageRpc.Option rpcOption) { this(rpcOption, null); } + /** + * Returns an option for specifying bucket's predefined ACL configuration. + */ public static BucketTargetOption predefinedAcl(PredefinedAcl acl) { return new BucketTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl.entry()); } + /** + * Returns an option for specifying bucket's default ACL configuration for blobs. + */ public static BucketTargetOption predefinedDefaultObjectAcl(PredefinedAcl acl) { return new BucketTargetOption(StorageRpc.Option.PREDEFINED_DEFAULT_OBJECT_ACL, acl.entry()); } + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BucketTargetOption metagenerationMatch() { return new BucketTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if metageneration matches. + */ public static BucketTargetOption metagenerationNotMatch() { return new BucketTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } } + /** + * Class for specifying bucket source options. + */ class BucketSourceOption extends Option { private static final long serialVersionUID = 5185657617120212117L; @@ -104,15 +215,68 @@ private BucketSourceOption(StorageRpc.Option rpcOption, long metageneration) { super(rpcOption, metageneration); } + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if bucket's metageneration does not match the provided value. + */ public static BucketSourceOption metagenerationMatch(long metageneration) { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); } + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if bucket's metageneration matches the provided value. + */ public static BucketSourceOption metagenerationNotMatch(long metageneration) { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } } + /** + * Class for specifying bucket source options. + */ + class BucketGetOption extends Option { + + private static final long serialVersionUID = 1901844869484087395L; + + private BucketGetOption(StorageRpc.Option rpcOption, long metageneration) { + super(rpcOption, metageneration); + } + + private BucketGetOption(StorageRpc.Option rpcOption, String value) { + super(rpcOption, value); + } + + /** + * Returns an option for bucket's metageneration match. If this option is used the request will + * fail if bucket's metageneration does not match the provided value. + */ + public static BucketGetOption metagenerationMatch(long metageneration) { + return new BucketGetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); + } + + /** + * Returns an option for bucket's metageneration mismatch. If this option is used the request + * will fail if bucket's metageneration matches the provided value. + */ + public static BucketGetOption metagenerationNotMatch(long metageneration) { + return new BucketGetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); + } + + /** + * Returns an option to specify the bucket's fields to be returned by the RPC call. 
If this + * option is not provided all bucket's fields are returned. {@code BucketGetOption.fields}) can + * be used to specify only the fields of interest. Bucket name is always returned, even if not + * specified. + */ + public static BucketGetOption fields(BucketField... fields) { + return new BucketGetOption(StorageRpc.Option.FIELDS, BucketField.selector(fields)); + } + } + + /** + * Class for specifying blob target options. + */ class BlobTargetOption extends Option { private static final long serialVersionUID = 214616862061934846L; @@ -125,26 +289,48 @@ private BlobTargetOption(StorageRpc.Option rpcOption) { this(rpcOption, null); } + /** + * Returns an option for specifying blob's predefined ACL configuration. + */ public static BlobTargetOption predefinedAcl(PredefinedAcl acl) { return new BlobTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl.entry()); } + /** + * Returns an option that causes an operation to succeed only if the target blob does not exist. + */ public static BlobTargetOption doesNotExist() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, 0L); } + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if generation does not match. + */ public static BlobTargetOption generationMatch() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH); } + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if generation matches. + */ public static BlobTargetOption generationNotMatch() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BlobTargetOption metagenerationMatch() { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches. + */ public static BlobTargetOption metagenerationNotMatch() { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } @@ -170,6 +356,9 @@ static Tuple convert(BlobInfo info, BlobWriteOptio } } + /** + * Class for specifying blob write options. + */ class BlobWriteOption implements Serializable { private static final long serialVersionUID = -3880421670966224580L; @@ -216,64 +405,218 @@ public boolean equals(Object obj) { return this.option == other.option && Objects.equals(this.value, other.value); } + /** + * Returns an option for specifying blob's predefined ACL configuration. + */ public static BlobWriteOption predefinedAcl(PredefinedAcl acl) { return new BlobWriteOption(Option.PREDEFINED_ACL, acl.entry()); } + /** + * Returns an option that causes an operation to succeed only if the target blob does not exist. + */ public static BlobWriteOption doesNotExist() { return new BlobWriteOption(Option.IF_GENERATION_MATCH, 0L); } + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if generation does not match. + */ public static BlobWriteOption generationMatch() { return new BlobWriteOption(Option.IF_GENERATION_MATCH); } + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if generation matches. 
+ */ public static BlobWriteOption generationNotMatch() { return new BlobWriteOption(Option.IF_GENERATION_NOT_MATCH); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if metageneration does not match. + */ public static BlobWriteOption metagenerationMatch() { return new BlobWriteOption(Option.IF_METAGENERATION_MATCH); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if metageneration matches. + */ public static BlobWriteOption metagenerationNotMatch() { return new BlobWriteOption(Option.IF_METAGENERATION_NOT_MATCH); } + /** + * Returns an option for blob's data MD5 hash match. If this option is used the request will + * fail if blobs' data MD5 hash does not match. + */ public static BlobWriteOption md5Match() { return new BlobWriteOption(Option.IF_MD5_MATCH, true); } + /** + * Returns an option for blob's data CRC32C checksum match. If this option is used the request + * will fail if blobs' data CRC32C checksum does not match. + */ public static BlobWriteOption crc32cMatch() { return new BlobWriteOption(Option.IF_CRC32C_MATCH, true); } } + /** + * Class for specifying blob source options. + */ class BlobSourceOption extends Option { private static final long serialVersionUID = -3712768261070182991L; - private BlobSourceOption(StorageRpc.Option rpcOption, long value) { + private BlobSourceOption(StorageRpc.Option rpcOption, Long value) { super(rpcOption, value); } + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobSourceOption generationMatch() { + return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH, null); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match the provided value. + */ public static BlobSourceOption generationMatch(long generation) { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); } + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobSourceOption generationNotMatch() { + return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, null); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches the provided value. + */ public static BlobSourceOption generationNotMatch(long generation) { return new BlobSourceOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); } + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if blob's metageneration does not match the provided value. 
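+   *
+   * <p>For example (an illustrative sketch; {@code service} and {@code blobId} are assumed to
+   * exist), to delete a blob only while its metageneration is still 42:
+   * <pre>    {@code service.delete(blobId, BlobSourceOption.metagenerationMatch(42L));}
+   * </pre>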
+ */ public static BlobSourceOption metagenerationMatch(long metageneration) { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); } + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if blob's metageneration matches the provided value. + */ public static BlobSourceOption metagenerationNotMatch(long metageneration) { return new BlobSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } } + /** + * Class for specifying blob get options. + */ + class BlobGetOption extends Option { + + private static final long serialVersionUID = 803817709703661480L; + + private BlobGetOption(StorageRpc.Option rpcOption, Long value) { + super(rpcOption, value); + } + + private BlobGetOption(StorageRpc.Option rpcOption, String value) { + super(rpcOption, value); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobGetOption generationMatch() { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_MATCH, (Long) null); + } + + /** + * Returns an option for blob's data generation match. If this option is used the request will + * fail if blob's generation does not match the provided value. + */ + public static BlobGetOption generationMatch(long generation) { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches. The generation value to compare with the actual + * blob's generation is taken from a source {@link BlobId} object. When this option is passed + * to a {@link Storage} method and {@link BlobId#generation()} is {@code null} or no + * {@link BlobId} is provided an exception is thrown. + */ + public static BlobGetOption generationNotMatch() { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, (Long) null); + } + + /** + * Returns an option for blob's data generation mismatch. If this option is used the request + * will fail if blob's generation matches the provided value. + */ + public static BlobGetOption generationNotMatch(long generation) { + return new BlobGetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); + } + + /** + * Returns an option for blob's metageneration match. If this option is used the request will + * fail if blob's metageneration does not match the provided value. + */ + public static BlobGetOption metagenerationMatch(long metageneration) { + return new BlobGetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); + } + + /** + * Returns an option for blob's metageneration mismatch. If this option is used the request will + * fail if blob's metageneration matches the provided value. + */ + public static BlobGetOption metagenerationNotMatch(long metageneration) { + return new BlobGetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); + } + + /** + * Returns an option to specify the blob's fields to be returned by the RPC call. If this option + * is not provided all blob's fields are returned. 
{@code BlobGetOption.fields} can be used to + * specify only the fields of interest. Blob name and bucket are always returned, even if not + * specified. + */ + public static BlobGetOption fields(BlobField... fields) { + return new BlobGetOption(StorageRpc.Option.FIELDS, BlobField.selector(fields)); + } + } + + /** + * Class for specifying bucket list options. + */ class BucketListOption extends Option { private static final long serialVersionUID = 8754017079673290353L; @@ -282,19 +625,44 @@ private BucketListOption(StorageRpc.Option option, Object value) { super(option, value); } + /** + * Returns an option to specify the maximum number of buckets to be returned. + */ public static BucketListOption maxResults(long maxResults) { return new BucketListOption(StorageRpc.Option.MAX_RESULTS, maxResults); } + /** + * Returns an option to specify the page token from which to start listing buckets. + */ public static BucketListOption startPageToken(String pageToken) { return new BucketListOption(StorageRpc.Option.PAGE_TOKEN, pageToken); } + /** + * Returns an option to set a prefix to filter results to buckets whose names begin with this + * prefix. + */ public static BucketListOption prefix(String prefix) { return new BucketListOption(StorageRpc.Option.PREFIX, prefix); } + + /** + * Returns an option to specify the bucket's fields to be returned by the RPC call. If this + * option is not provided, all the bucket's fields are returned. {@code BucketListOption.fields} can + * be used to specify only the fields of interest. Bucket name is always returned, even if not + * specified. + */ + public static BucketListOption fields(BucketField... fields) { + StringBuilder builder = new StringBuilder(); + builder.append("items(").append(BucketField.selector(fields)).append(")"); + return new BucketListOption(StorageRpc.Option.FIELDS, builder.toString()); + } } + /** + * Class for specifying blob list options. + */ class BlobListOption extends Option { private static final long serialVersionUID = 9083383524788661294L; @@ -303,23 +671,51 @@ private BlobListOption(StorageRpc.Option option, Object value) { super(option, value); } + /** + * Returns an option to specify the maximum number of blobs to be returned. + */ public static BlobListOption maxResults(long maxResults) { return new BlobListOption(StorageRpc.Option.MAX_RESULTS, maxResults); } + /** + * Returns an option to specify the page token from which to start listing blobs. + */ public static BlobListOption startPageToken(String pageToken) { return new BlobListOption(StorageRpc.Option.PAGE_TOKEN, pageToken); } + /** + * Returns an option to set a prefix to filter results to blobs whose names begin with this + * prefix. + */ public static BlobListOption prefix(String prefix) { return new BlobListOption(StorageRpc.Option.PREFIX, prefix); } + + /** + * Returns an option to specify whether blob listing should include subdirectories. + */ public static BlobListOption recursive(boolean recursive) { return new BlobListOption(StorageRpc.Option.DELIMITER, recursive); } + + /** + * Returns an option to specify the blob's fields to be returned by the RPC call. If this option + * is not provided, all the blob's fields are returned. {@code BlobListOption.fields} can be used to + * specify only the fields of interest. Blob name and bucket are always returned, even if not + * specified. + */ + public static BlobListOption fields(BlobField...
fields) { + StringBuilder builder = new StringBuilder(); + builder.append("items(").append(BlobField.selector(fields)).append(")"); + return new BlobListOption(StorageRpc.Option.FIELDS, builder.toString()); + } } + /** + * Class for specifying signed URL options. + */ class SignUrlOption implements Serializable { private static final long serialVersionUID = 7850569877451099267L; @@ -379,6 +775,12 @@ public static SignUrlOption serviceAccount(ServiceAccountAuthCredentials credent } } + /** + * A class to contain all information needed for a Google Cloud Storage Compose operation. + * + * @see + * Compose Operation + */ class ComposeRequest implements Serializable { private static final long serialVersionUID = -7385681353748590911L; @@ -387,6 +789,9 @@ class ComposeRequest implements Serializable { private final BlobInfo target; private final List targetOptions; + /** + * Class for Compose source blobs. + */ public static class SourceBlob implements Serializable { private static final long serialVersionUID = 4094962795951990439L; @@ -418,6 +823,9 @@ public static class Builder { private final Set targetOptions = new LinkedHashSet<>(); private BlobInfo target; + /** + * Add source blobs for compose operation. + */ public Builder addSource(Iterable blobs) { for (String blob : blobs) { sourceBlobs.add(new SourceBlob(blob)); @@ -425,6 +833,9 @@ public Builder addSource(Iterable blobs) { return this; } + /** + * Add source blobs for compose operation. + */ public Builder addSource(String... blobs) { return addSource(Arrays.asList(blobs)); } @@ -437,21 +848,33 @@ public Builder addSource(String blob, long generation) { return this; } + /** + * Sets compose operation's target blob. + */ public Builder target(BlobInfo target) { this.target = target; return this; } + /** + * Sets compose operation's target blob options. + */ public Builder targetOptions(BlobTargetOption... options) { Collections.addAll(targetOptions, options); return this; } + /** + * Sets compose operation's target blob options. + */ public Builder targetOptions(Iterable options) { Iterables.addAll(targetOptions, options); return this; } + /** + * Creates a {@code ComposeRequest} object. + */ public ComposeRequest build() { checkArgument(!sourceBlobs.isEmpty()); checkNotNull(target); @@ -465,39 +888,68 @@ private ComposeRequest(Builder builder) { targetOptions = ImmutableList.copyOf(builder.targetOptions); } + /** + * Returns compose operation's source blobs. + */ public List sourceBlobs() { return sourceBlobs; } + /** + * Returns compose operation's target blob. + */ public BlobInfo target() { return target; } + /** + * Returns compose operation's target blob's options. + */ public List targetOptions() { return targetOptions; } + /** + * Creates a {@code ComposeRequest} object. + * + * @param sources source blobs names + * @param target target blob + */ public static ComposeRequest of(Iterable sources, BlobInfo target) { return builder().target(target).addSource(sources).build(); } + /** + * Creates a {@code ComposeRequest} object. + * + * @param bucket name of the bucket where the compose operation takes place + * @param sources source blobs names + * @param target target blob name + */ public static ComposeRequest of(String bucket, Iterable sources, String target) { return of(sources, BlobInfo.builder(BlobId.of(bucket, target)).build()); } + /** + * Returns a {@code ComposeRequest} builder. 
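+   *
+   * <p>Example usage (an illustrative sketch; bucket and blob names are made up):
+   * <pre>    {@code ComposeRequest request = ComposeRequest.builder()
+   *    .addSource("part1", "part2")
+   *    .target(BlobInfo.builder(BlobId.of("my-bucket", "composed")).build())
+   *    .build();}
+   * </pre>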
+ */ public static Builder builder() { return new Builder(); } } + /** + * A class to contain all information needed for a Google Cloud Storage Copy operation. + */ class CopyRequest implements Serializable { - private static final long serialVersionUID = -2606508373751748775L; + private static final long serialVersionUID = -4498650529476219937L; private final BlobId source; private final List sourceOptions; private final BlobInfo target; private final List targetOptions; + private final Long megabytesCopiedPerChunk; public static class Builder { @@ -505,42 +957,105 @@ public static class Builder { private final Set targetOptions = new LinkedHashSet<>(); private BlobId source; private BlobInfo target; + private Long megabytesCopiedPerChunk; + /** + * Sets the blob to copy given bucket and blob name. + * + * @return the builder + */ public Builder source(String bucket, String blob) { this.source = BlobId.of(bucket, blob); return this; } + /** + * Sets the blob to copy given a {@link BlobId}. + * + * @return the builder + */ public Builder source(BlobId source) { this.source = source; return this; } + /** + * Sets blob's source options. + * + * @return the builder + */ public Builder sourceOptions(BlobSourceOption... options) { Collections.addAll(sourceOptions, options); return this; } + /** + * Sets blob's source options. + * + * @return the builder + */ public Builder sourceOptions(Iterable options) { Iterables.addAll(sourceOptions, options); return this; } - public Builder target(BlobInfo target) { - this.target = target; + /** + * Sets the copy target. Target blob information is copied from source. + * + * @return the builder + */ + public Builder target(BlobId target) { + this.target = BlobInfo.builder(target).build(); return this; } - public Builder targetOptions(BlobTargetOption... options) { + /** + * Sets the copy target and target options. {@code target} parameter is used to override + * source blob information (e.g. {@code contentType}, {@code contentLanguage}). {@code + * target.contentType} is a required field. + * + * @return the builder + * @throws IllegalArgumentException if {@code target.contentType} is {@code null} + */ + public Builder target(BlobInfo target, BlobTargetOption... options) + throws IllegalArgumentException { + checkContentType(target); + this.target = target; Collections.addAll(targetOptions, options); return this; } - public Builder targetOptions(Iterable options) { + /** + * Sets the copy target and target options. {@code target} parameter is used to override + * source blob information (e.g. {@code contentType}, {@code contentLanguage}). {@code + * target.contentType} is a required field. + * + * @return the builder + * @throws IllegalArgumentException if {@code target.contentType} is {@code null} + */ + public Builder target(BlobInfo target, Iterable options) + throws IllegalArgumentException { + checkContentType(target); + this.target = target; Iterables.addAll(targetOptions, options); return this; } + /** + * Sets the maximum number of megabytes to copy for each RPC call. This parameter is ignored + * if source and target blob share the same location and storage class as copy is made with + * one single RPC. + * + * @return the builder + */ + public Builder megabytesCopiedPerChunk(Long megabytesCopiedPerChunk) { + this.megabytesCopiedPerChunk = megabytesCopiedPerChunk; + return this; + } + + /** + * Creates a {@code CopyRequest} object. 
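+   *
+   * <p>For example (an illustrative sketch; it assumes {@code BlobInfo.Builder} exposes a
+   * {@code contentType} setter), a request overriding the target's content type could read:
+   * <pre>    {@code CopyRequest request = CopyRequest.builder()
+   *    .source(BlobId.of("my-bucket", "source-blob"))
+   *    .target(BlobInfo.builder(BlobId.of("my-bucket", "target-blob"))
+   *        .contentType("text/plain").build())
+   *    .build();}
+   * </pre>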
+ */ public CopyRequest build() { checkNotNull(source); checkNotNull(target); @@ -553,51 +1068,147 @@ private CopyRequest(Builder builder) { sourceOptions = ImmutableList.copyOf(builder.sourceOptions); target = checkNotNull(builder.target); targetOptions = ImmutableList.copyOf(builder.targetOptions); + megabytesCopiedPerChunk = builder.megabytesCopiedPerChunk; } + /** + * Returns the blob to copy, as a {@link BlobId}. + */ public BlobId source() { return source; } + /** + * Returns blob's source options. + */ public List sourceOptions() { return sourceOptions; } + /** + * Returns the {@link BlobInfo} for the target blob. + */ public BlobInfo target() { return target; } + /** + * Returns blob's target options. + */ public List targetOptions() { return targetOptions; } - public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo target) { + /** + * Returns the maximum number of megabytes to copy for each RPC call. This parameter is ignored + * if source and target blob share the same location and storage class as copy is made with + * one single RPC. + */ + public Long megabytesCopiedPerChunk() { + return megabytesCopiedPerChunk; + } + + /** + * Creates a copy request. {@code target} parameter is used to override source blob information + * (e.g. {@code contentType}, {@code contentLanguage}). {@code target.contentType} is a required + * field. + * + * @param sourceBucket name of the bucket containing the source blob + * @param sourceBlob name of the source blob + * @param target a {@code BlobInfo} object for the target blob + * @return a copy request + * @throws IllegalArgumentException if {@code target.contentType} is {@code null} + */ + public static CopyRequest of(String sourceBucket, String sourceBlob, BlobInfo target) + throws IllegalArgumentException { + checkContentType(target); return builder().source(sourceBucket, sourceBlob).target(target).build(); } - public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) { + /** + * Creates a copy request. {@code target} parameter is used to override source blob information + * (e.g. {@code contentType}, {@code contentLanguage}). {@code target.contentType} is a required + * field. + * + * @param sourceBlobId a {@code BlobId} object for the source blob + * @param target a {@code BlobInfo} object for the target blob + * @return a copy request + * @throws IllegalArgumentException if {@code target.contentType} is {@code null} + */ + public static CopyRequest of(BlobId sourceBlobId, BlobInfo target) + throws IllegalArgumentException { + checkContentType(target); return builder().source(sourceBlobId).target(target).build(); } + /** + * Creates a copy request. Target blob information is copied from source. + * + * @param sourceBucket name of the bucket containing both the source and the target blob + * @param sourceBlob name of the source blob + * @param targetBlob name of the target blob + * @return a copy request + */ public static CopyRequest of(String sourceBucket, String sourceBlob, String targetBlob) { - return of(sourceBucket, sourceBlob, - BlobInfo.builder(BlobId.of(sourceBucket, targetBlob)).build()); + return CopyRequest.builder() + .source(sourceBucket, sourceBlob) + .target(BlobId.of(sourceBucket, targetBlob)) + .build(); + } + + /** + * Creates a copy request. Target blob information is copied from source. 
+ * + * @param sourceBucket name of the bucket containing the source blob + * @param sourceBlob name of the source blob + * @param target a {@code BlobId} object for the target blob + * @return a copy request + */ + public static CopyRequest of(String sourceBucket, String sourceBlob, BlobId target) { + return builder().source(sourceBucket, sourceBlob).target(target).build(); } + /** + * Creates a copy request. Target blob information is copied from source. + * + * @param sourceBlobId a {@code BlobId} object for the source blob + * @param targetBlob name of the target blob, in the same bucket of the source blob + * @return a copy request + */ public static CopyRequest of(BlobId sourceBlobId, String targetBlob) { - return of(sourceBlobId, - BlobInfo.builder(BlobId.of(sourceBlobId.bucket(), targetBlob)).build()); + return CopyRequest.builder() + .source(sourceBlobId) + .target(BlobId.of(sourceBlobId.bucket(), targetBlob)) + .build(); + } + + /** + * Creates a copy request. Target blob information is copied from source. + * + * @param sourceBlobId a {@code BlobId} object for the source blob + * @param targetBlobId a {@code BlobId} object for the target blob + * @return a copy request + */ + public static CopyRequest of(BlobId sourceBlobId, BlobId targetBlobId) { + return CopyRequest.builder() + .source(sourceBlobId) + .target(targetBlobId) + .build(); } public static Builder builder() { return new Builder(); } + + private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentException { + checkArgument(blobInfo.contentType() != null, "Blob content type can not be null"); + } } /** * Create a new bucket. * - * @return a complete bucket information. + * @return a complete bucket information * @throws StorageException upon failure */ BucketInfo create(BucketInfo bucketInfo, BucketTargetOption... options); @@ -605,7 +1216,7 @@ public static Builder builder() { /** * Create a new blob with no content. * - * @return a complete blob information. + * @return a complete blob information * @throws StorageException upon failure */ BlobInfo create(BlobInfo blobInfo, BlobTargetOption... options); @@ -615,7 +1226,7 @@ public static Builder builder() { * {@link #writer} is recommended as it uses resumable upload. MD5 and CRC32C hashes of * {@code content} are computed and used for validating transferred data. * - * @return a complete blob information. + * @return a complete blob information * @throws StorageException upon failure * @see Hashes and ETags */ @@ -625,9 +1236,10 @@ public static Builder builder() { * Create a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. By default any md5 and crc32c * values in the given {@code blobInfo} are ignored unless requested via the - * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. + * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. The given + * input stream is closed upon success. * - * @return a complete blob information. + * @return a complete blob information * @throws StorageException upon failure */ BlobInfo create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options); @@ -637,21 +1249,21 @@ public static Builder builder() { * * @throws StorageException upon failure */ - BucketInfo get(String bucket, BucketSourceOption... options); + BucketInfo get(String bucket, BucketGetOption... options); /** * Return the requested blob or {@code null} if not found. 
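+   *
+   * <p>For example (an illustrative sketch; {@code service} stands for an initialized
+   * {@code Storage} instance), to fetch only the blob's size:
+   * <pre>    {@code BlobInfo info = service.get("my-bucket", "my-blob",
+   *    BlobGetOption.fields(BlobField.SIZE));}
+   * </pre>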
* * @throws StorageException upon failure */ - BlobInfo get(String bucket, String blob, BlobSourceOption... options); + BlobInfo get(String bucket, String blob, BlobGetOption... options); /** * Return the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ - BlobInfo get(BlobId blob, BlobSourceOption... options); + BlobInfo get(BlobId blob, BlobGetOption... options); /** * Return the requested blob or {@code null} if not found. @@ -665,14 +1277,14 @@ public static Builder builder() { * * @throws StorageException upon failure */ - ListResult list(BucketListOption... options); + Page list(BucketListOption... options); /** * List the bucket's blobs. * * @throws StorageException upon failure */ - ListResult list(String bucket, BlobListOption... options); + Page list(String bucket, BlobListOption... options); /** * Update bucket information. * @@ -683,7 +1295,14 @@ public static Builder builder() { BucketInfo update(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Update blob information. + * Update blob information. The original metadata is merged with the metadata in the provided + * {@code blobInfo}. To replace metadata instead, first unset it by setting the provided + * {@code blobInfo}'s metadata to {@code null}. + * + *

      Example usage of replacing blob's metadata: + *

          {@code service.update(BlobInfo.builder("bucket", "name").metadata(null).build());}
      +   *    {@code service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());}
      +   * 
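+   *
+   * <p>Here {@code newMetadata} is assumed to be an ordinary map of the desired entries, e.g.
+   * (an illustrative sketch):
+   * <pre>    {@code Map<String, String> newMetadata = new HashMap<>();
+   *    newMetadata.put("key", "value");}
+   * </pre>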
* * @return the updated blob * @throws StorageException upon failure */ BlobInfo update(BlobInfo blobInfo, BlobTargetOption... options); /** - * Update blob information. + * Update blob information. The original metadata is merged with the metadata in the provided + * {@code blobInfo}. To replace metadata instead, first unset it by setting the provided + * {@code blobInfo}'s metadata to {@code null}. + * + *

      Example usage of replacing blob's metadata: + *

          {@code service.update(BlobInfo.builder("bucket", "name").metadata(null).build());}
      +   *    {@code service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());}
      +   * 
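Because `update` merges rather than replaces metadata, a full replacement takes the two calls spelled out in the example above. A slightly expanded sketch, where `service` is a `Storage` instance as in the javadoc examples and the bucket, blob name, and map contents are placeholders:

```java
// Assumes java.util.Map/HashMap are imported; entries are illustrative only.
Map<String, String> newMetadata = new HashMap<>();
newMetadata.put("owner", "team-a"); // placeholder entry

// First unset the existing metadata, then set the replacement.
service.update(BlobInfo.builder("bucket", "name").metadata(null).build());
service.update(BlobInfo.builder("bucket", "name").metadata(newMetadata).build());
```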
 *
 * @return the updated blob
 * @throws StorageException upon failure

@@ -701,7 +1327,7 @@ public static Builder builder() {
 /**
  * Delete the requested bucket.
  *
- * @return true if bucket was deleted
+ * @return {@code true} if the bucket was deleted, {@code false} if it was not found
  * @throws StorageException upon failure
  */
 boolean delete(String bucket, BucketSourceOption... options);

@@ -709,7 +1335,7 @@ public static Builder builder() {
 /**
  * Delete the requested blob.
  *
- * @return true if blob was deleted
+ * @return {@code true} if the blob was deleted, {@code false} if it was not found
  * @throws StorageException upon failure
  */
 boolean delete(String bucket, String blob, BlobSourceOption... options);

@@ -717,7 +1343,7 @@ public static Builder builder() {
 /**
  * Delete the requested blob.
  *
- * @return true if blob was deleted
+ * @return {@code true} if the blob was deleted, {@code false} if it was not found
  * @throws StorageException upon failure
  */
 boolean delete(BlobId blob, BlobSourceOption... options);

@@ -725,7 +1351,7 @@ public static Builder builder() {
 /**
  * Delete the requested blob.
  *
- * @return true if blob was deleted
+ * @return {@code true} if the blob was deleted, {@code false} if it was not found
  * @throws StorageException upon failure
  */
 boolean delete(BlobId blob);

@@ -733,23 +1359,42 @@ public static Builder builder() {
 /**
  * Send a compose request.
  *
- * @return the composed blob.
+ * @return the composed blob
  * @throws StorageException upon failure
  */
 BlobInfo compose(ComposeRequest composeRequest);

 /**
- * Send a copy request.
+ * Sends a copy request. Returns a {@link CopyWriter} object for the provided
+ * {@code CopyRequest}. If the source and destination objects share the same location and
+ * storage class, the source blob is copied with a single request and
+ * {@link CopyWriter#result()} returns immediately, regardless of the
+ * {@link CopyRequest#megabytesCopiedPerChunk} parameter. If the source and destination have
+ * different locations or storage classes, {@link CopyWriter#result()} might issue multiple RPC
+ * calls depending on the blob's size.
 *
- * @return the copied blob.
+ * <p>Example usage of copy:
+ * <pre>    {@code BlobInfo blob = service.copy(copyRequest).result();}
+ * </pre>
+ * To explicitly issue chunk copy requests use {@link CopyWriter#copyChunk()} instead:
+ * <pre>    {@code CopyWriter copyWriter = service.copy(copyRequest);
+ *    while (!copyWriter.isDone()) {
+ *        copyWriter.copyChunk();
+ *    }
+ *    BlobInfo blob = copyWriter.result();
+ * }
+ * </pre>
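For copies that cross locations or storage classes, the chunk size can apparently be tuned when building the request. A hedged sketch, assuming the `CopyRequest` builder exposes a setter for the `megabytesCopiedPerChunk` parameter that the javadoc above references:

```java
// Assumption: the builder accepts megabytesCopiedPerChunk (see the javadoc above).
Storage.CopyRequest request = Storage.CopyRequest.builder()
    .source(BlobId.of("source-bucket", "source-blob"))
    .target(BlobId.of("target-bucket", "target-blob"))
    .megabytesCopiedPerChunk(2L) // copy at most ~2 MB per RPC
    .build();
CopyWriter copyWriter = service.copy(request);
while (!copyWriter.isDone()) {
  copyWriter.copyChunk(); // one rewrite RPC per call
}
BlobInfo blob = copyWriter.result();
```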
      + * + * @return a {@link CopyWriter} object that can be used to get information on the newly created + * blob or to complete the copy if more than one RPC request is needed * @throws StorageException upon failure + * @see Rewrite */ - BlobInfo copy(CopyRequest copyRequest); + CopyWriter copy(CopyRequest copyRequest); /** * Reads all the bytes from a blob. * - * @return the blob's content. + * @return the blob's content * @throws StorageException upon failure */ byte[] readAllBytes(String bucket, String blob, BlobSourceOption... options); @@ -757,7 +1402,7 @@ public static Builder builder() { /** * Reads all the bytes from a blob. * - * @return the blob's content. + * @return the blob's content * @throws StorageException upon failure */ byte[] readAllBytes(BlobId blob, BlobSourceOption... options); @@ -768,21 +1413,36 @@ public static Builder builder() { * @return the batch response * @throws StorageException upon failure */ - BatchResponse apply(BatchRequest batchRequest); + BatchResponse submit(BatchRequest batchRequest); /** - * Return a channel for reading the blob's content. + * Return a channel for reading the blob's content. The blob's latest generation is read. If the + * blob changes while reading (i.e. {@link BlobInfo#etag()} changes), subsequent calls to + * {@code blobReadChannel.read(ByteBuffer)} may throw {@link StorageException}. + * + *

<p>The {@link BlobSourceOption#generationMatch(long)} option can be provided to ensure that
+ * {@code blobReadChannel.read(ByteBuffer)} calls will throw {@link StorageException} if the
+ * blob's generation differs from the expected one.
 *
 * @throws StorageException upon failure
 */
- BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options);
+ ReadChannel reader(String bucket, String blob, BlobSourceOption... options);

/**
- * Return a channel for reading the blob's content.
+ * Return a channel for reading the blob's content. If {@code blob.generation()} is set,
+ * data corresponding to that generation is read. If {@code blob.generation()} is {@code null},
+ * the blob's latest generation is read. If the blob changes while reading (i.e.
+ * {@link BlobInfo#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)}
+ * may throw {@link StorageException}.
+ *
+ * <p>

The {@link BlobSourceOption#generationMatch()} and
+ * {@link BlobSourceOption#generationMatch(long)} options can be used to ensure that
+ * {@code blobReadChannel.read(ByteBuffer)} calls will throw {@link StorageException} if the
+ * blob's generation differs from the expected one.
 *
 * @throws StorageException upon failure
 */
- BlobReadChannel reader(BlobId blob, BlobSourceOption... options);
+ ReadChannel reader(BlobId blob, BlobSourceOption... options);

/**
 * Create a blob and return a channel for writing its content. By default any md5 and crc32c

@@ -791,7 +1451,7 @@ public static Builder builder() {
 *
 * @throws StorageException upon failure
 */
- BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options);
+ WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options);

/**
 * Generates a signed URL for a blob.

@@ -800,8 +1460,8 @@ public static Builder builder() {
 * is only valid within a certain time period.
 * This is particularly useful if you don't want publicly
 * accessible blobs, but don't want to require users to explicitly log in.
- * <p>

- * Example usage of creating a signed URL that is valid for 2 weeks:
+ *
+ * <p>Example usage of creating a signed URL that is valid for 2 weeks:
 * <pre>   {@code
 *     service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS);
 * }</pre>
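Because the returned URL carries its own signature, any plain HTTP client can use it. A minimal sketch of downloading through a signed URL, assuming Guava's `ByteStreams` is available (it is already a dependency of this library):

```java
URL signedUrl = service.signUrl(BlobInfo.builder("bucket", "name").build(), 14, TimeUnit.DAYS);
// No credentials needed: the signature in the query string authorizes the request.
try (InputStream in = signedUrl.openStream()) {
  byte[] content = ByteStreams.toByteArray(in);
}
```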
      @@ -826,7 +1486,11 @@ public static Builder builder() { List get(BlobId... blobIds); /** - * Updates the requested blobs. A batch request is used to perform this call. + * Updates the requested blobs. A batch request is used to perform this call. Original metadata + * are merged with metadata in the provided {@code BlobInfo} objects. To replace metadata instead + * you first have to unset them. Unsetting metadata can be done by setting the provided + * {@code BlobInfo} objects metadata to {@code null}. See + * {@link #update(com.google.gcloud.storage.BlobInfo)} for a code example. * * @param blobInfos blobs to update * @return an immutable list of {@code BlobInfo} objects. If a blob does not exist or access to it @@ -840,8 +1504,8 @@ public static Builder builder() { * * @param blobIds blobs to delete * @return an immutable list of booleans. If a blob has been deleted the corresponding item in the - * list is {@code true}. If deletion failed or access to the resource was denied the item is - * {@code false}. + * list is {@code true}. If a blob was not found, deletion failed or access to the resource + * was denied the corresponding item is {@code false}. * @throws StorageException upon failure */ List delete(BlobId... blobIds); diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java index e354e3a6d427..0c952c9a65d6 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageException.java @@ -16,8 +16,14 @@ package com.google.gcloud.storage; -import com.google.gcloud.RetryHelper; +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.BaseServiceException; import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.RetryHelper.RetryInterruptedException; + +import java.io.IOException; +import java.util.Set; /** * Storage service exception. @@ -25,29 +31,35 @@ * @see Google Cloud * Storage error codes */ -public class StorageException extends RuntimeException { +public class StorageException extends BaseServiceException { - private static final long serialVersionUID = -3748432005065428084L; - private static final int UNKNOWN_CODE = -1; + // see: https://cloud.google.com/storage/docs/concepts-techniques#practices + private static final Set RETRYABLE_ERRORS = ImmutableSet.of( + new Error(504, null), + new Error(503, null), + new Error(502, null), + new Error(500, null), + new Error(429, null), + new Error(408, null), + new Error(null, "internalError")); - private final int code; - private final boolean retryable; + private static final long serialVersionUID = -4168430271327813063L; - public StorageException(int code, String message, boolean retryable) { - super(message); - this.code = code; - this.retryable = retryable; + public StorageException(int code, String message) { + super(code, message, null, true); } - /** - * Returns the code associated with this exception. 
- */ - public int code() { - return code; + public StorageException(IOException exception) { + super(exception, true); + } + + public StorageException(GoogleJsonError error) { + super(error, true); } - public boolean retryable() { - return retryable; + @Override + protected Set retryableErrors() { + return RETRYABLE_ERRORS; } /** @@ -58,12 +70,7 @@ public boolean retryable() { * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} */ static StorageException translateAndThrow(RetryHelperException ex) { - if (ex.getCause() instanceof StorageException) { - throw (StorageException) ex.getCause(); - } - if (ex instanceof RetryHelper.RetryInterruptedException) { - RetryHelper.RetryInterruptedException.propagate(); - } - throw new StorageException(UNKNOWN_CODE, ex.getMessage(), false); + BaseServiceException.translateAndPropagateIfPossible(ex); + throw new StorageException(UNKNOWN_CODE, ex.getMessage()); } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageFactory.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageFactory.java index e269f0c9d92b..fbce5559464c 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageFactory.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageFactory.java @@ -17,27 +17,10 @@ package com.google.gcloud.storage; +import com.google.gcloud.ServiceFactory; + /** - * A base class for Storage factories. + * An interface for Storage factories. */ -public abstract class StorageFactory { - - private static final StorageFactory INSTANCE = new StorageFactory() { - @Override - public Storage get(StorageOptions options) { - return new StorageImpl(options); - } - }; - - /** - * Returns the default factory instance. - */ - public static StorageFactory instance() { - return INSTANCE; - } - - /** - * Returns a {@code Storage} service for the given options. 
- */ - public abstract Storage get(StorageOptions options); +public interface StorageFactory extends ServiceFactory { } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java index 6edd2713fded..b6a833f26ab4 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageImpl.java @@ -28,10 +28,10 @@ import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_GENERATION_NOT_MATCH; import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_MATCH; import static com.google.gcloud.spi.StorageRpc.Option.IF_SOURCE_METAGENERATION_NOT_MATCH; -import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.nio.charset.StandardCharsets.UTF_8; import com.google.api.services.storage.model.StorageObject; +import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.collect.ImmutableList; @@ -39,16 +39,18 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Ints; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; import com.google.gcloud.BaseService; -import com.google.gcloud.ExceptionHandler; -import com.google.gcloud.ExceptionHandler.Interceptor; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.PageImpl.NextPageFetcher; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RetryHelper.RetryHelperException; import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpc.RewriteResponse; import com.google.gcloud.spi.StorageRpc.Tuple; import java.io.ByteArrayInputStream; @@ -67,32 +69,11 @@ import java.util.EnumMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; final class StorageImpl extends BaseService implements Storage { - private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { - - private static final long serialVersionUID = -7758580330857881124L; - - @Override - public RetryResult afterEval(Exception exception, RetryResult retryResult) { - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - - @Override - public RetryResult beforeEval(Exception exception) { - if (exception instanceof StorageException) { - boolean retriable = ((StorageException) exception).retryable(); - return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; - } - return Interceptor.RetryResult.CONTINUE_EVALUATION; - } - }; - static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() - .abortOn(RuntimeException.class).interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); private static final byte[] EMPTY_BYTE_ARRAY = {}; private static final String EMPTY_BYTE_ARRAY_MD5 = "1B2M2Y8AsgTpgAmY7PhCfg=="; private static final String EMPTY_BYTE_ARRAY_CRC32C = "AAAAAA=="; @@ -101,9 +82,7 @@ public RetryResult beforeEval(Exception exception) { StorageImpl(StorageOptions options) { super(options); - storageRpc = options.storageRpc(); - // todo: provide rewrite - https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite - // todo: check if we need to expose https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert vs using bucket update/patch + storageRpc = options.rpc(); } @Override @@ -166,7 +145,7 @@ public StorageObject call() { } @Override - public BucketInfo get(String bucket, BucketSourceOption... options) { + public BucketInfo get(String bucket, BucketGetOption... options) { final com.google.api.services.storage.model.Bucket bucketPb = BucketInfo.of(bucket).toPb(); final Map optionsMap = optionMap(options); try { @@ -174,14 +153,7 @@ public BucketInfo get(String bucket, BucketSourceOption... options) { new Callable() { @Override public com.google.api.services.storage.model.Bucket call() { - try { - return storageRpc.get(bucketPb, optionsMap); - } catch (StorageException ex) { - if (ex.code() == HTTP_NOT_FOUND) { - return null; - } - throw ex; - } + return storageRpc.get(bucketPb, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); return answer == null ? null : BucketInfo.fromPb(answer); @@ -191,26 +163,19 @@ public com.google.api.services.storage.model.Bucket call() { } @Override - public BlobInfo get(String bucket, String blob, BlobSourceOption... options) { + public BlobInfo get(String bucket, String blob, BlobGetOption... options) { return get(BlobId.of(bucket, blob), options); } @Override - public BlobInfo get(BlobId blob, BlobSourceOption... options) { + public BlobInfo get(BlobId blob, BlobGetOption... options) { final StorageObject storedObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { StorageObject storageObject = runWithRetries(new Callable() { @Override public StorageObject call() { - try { - return storageRpc.get(storedObject, optionsMap); - } catch (StorageException ex) { - if (ex.code() == HTTP_NOT_FOUND) { - return null; - } - throw ex; - } + return storageRpc.get(storedObject, optionsMap); } }, options().retryParams(), EXCEPTION_HANDLER); return storageObject == null ? 
null : BlobInfo.fromPb(storageObject); @@ -221,77 +186,68 @@ public StorageObject call() { @Override public BlobInfo get(BlobId blob) { - return get(blob, new BlobSourceOption[0]); + return get(blob, new BlobGetOption[0]); } - private static abstract class BasePageFetcher - implements BaseListResult.NextPageFetcher { + private static class BucketPageFetcher implements NextPageFetcher { - private static final long serialVersionUID = 8236329004030295223L; - protected final Map requestOptions; - protected final StorageOptions serviceOptions; + private static final long serialVersionUID = 5850406828803613729L; + private final Map requestOptions; + private final StorageOptions serviceOptions; - BasePageFetcher(StorageOptions serviceOptions, String cursor, + BucketPageFetcher( + StorageOptions serviceOptions, String cursor, Map optionMap) { + this.requestOptions = + PageImpl.nextRequestOptions(StorageRpc.Option.PAGE_TOKEN, cursor, optionMap); this.serviceOptions = serviceOptions; - ImmutableMap.Builder builder = ImmutableMap.builder(); - if (cursor != null) { - builder.put(StorageRpc.Option.PAGE_TOKEN, cursor); - } - for (Map.Entry option : optionMap.entrySet()) { - if (option.getKey() != StorageRpc.Option.PAGE_TOKEN) { - builder.put(option.getKey(), option.getValue()); - } - } - this.requestOptions = builder.build(); - } - } - - private static class BucketPageFetcher extends BasePageFetcher { - - private static final long serialVersionUID = -5490616010200159174L; - - BucketPageFetcher(StorageOptions serviceOptions, String cursor, - Map optionMap) { - super(serviceOptions, cursor, optionMap); } @Override - public ListResult nextPage() { + public Page nextPage() { return listBuckets(serviceOptions, requestOptions); } } - private static class BlobPageFetcher extends BasePageFetcher { + private static class BlobPageFetcher implements NextPageFetcher { - private static final long serialVersionUID = -5490616010200159174L; + private static final long serialVersionUID = 81807334445874098L; + private final Map requestOptions; + private final StorageOptions serviceOptions; private final String bucket; BlobPageFetcher(String bucket, StorageOptions serviceOptions, String cursor, Map optionMap) { - super(serviceOptions, cursor, optionMap); + this.requestOptions = + PageImpl.nextRequestOptions(StorageRpc.Option.PAGE_TOKEN, cursor, optionMap); + this.serviceOptions = serviceOptions; this.bucket = bucket; } @Override - public ListResult nextPage() { + public Page nextPage() { return listBlobs(bucket, serviceOptions, requestOptions); } } @Override - public ListResult list(BucketListOption... options) { + public Page list(BucketListOption... options) { return listBuckets(options(), optionMap(options)); } - private static ListResult listBuckets(final StorageOptions serviceOptions, + @Override + public Page list(final String bucket, BlobListOption... 
options) { + return listBlobs(bucket, options(), optionMap(options)); + } + + private static Page listBuckets(final StorageOptions serviceOptions, final Map optionsMap) { try { Tuple> result = runWithRetries( new Callable>>() { @Override public Tuple> call() { - return serviceOptions.storageRpc().list(optionsMap); + return serviceOptions.rpc().list(optionsMap); } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); @@ -303,26 +259,21 @@ public BucketInfo apply(com.google.api.services.storage.model.Bucket bucketPb) { return BucketInfo.fromPb(bucketPb); } }); - return new BaseListResult<>(new BucketPageFetcher(serviceOptions, cursor, optionsMap), cursor, + return new PageImpl<>(new BucketPageFetcher(serviceOptions, cursor, optionsMap), cursor, buckets); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } } - @Override - public ListResult list(final String bucket, BlobListOption... options) { - return listBlobs(bucket, options(), optionMap(options)); - } - - private static ListResult listBlobs(final String bucket, + private static Page listBlobs(final String bucket, final StorageOptions serviceOptions, final Map optionsMap) { try { Tuple> result = runWithRetries( new Callable>>() { @Override public Tuple> call() { - return serviceOptions.storageRpc().list(bucket, optionsMap); + return serviceOptions.rpc().list(bucket, optionsMap); } }, serviceOptions.retryParams(), EXCEPTION_HANDLER); String cursor = result.x(); @@ -334,7 +285,7 @@ public BlobInfo apply(StorageObject storageObject) { return BlobInfo.fromPb(storageObject); } }); - return new BaseListResult<>(new BlobPageFetcher(bucket, serviceOptions, cursor, optionsMap), + return new PageImpl<>(new BlobPageFetcher(bucket, serviceOptions, cursor, optionsMap), cursor, blobs); } catch (RetryHelperException e) { @@ -404,7 +355,7 @@ public boolean delete(String bucket, String blob, BlobSourceOption... options) { @Override public boolean delete(BlobId blob, BlobSourceOption... 
options) { final StorageObject storageObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { return runWithRetries(new Callable() { @Override @@ -427,8 +378,9 @@ public BlobInfo compose(final ComposeRequest composeRequest) { final List sources = Lists.newArrayListWithCapacity(composeRequest.sourceBlobs().size()); for (ComposeRequest.SourceBlob sourceBlob : composeRequest.sourceBlobs()) { - sources.add(BlobInfo.builder(composeRequest.target().bucket(), sourceBlob.name()) - .generation(sourceBlob.generation()).build().toPb()); + sources.add(BlobInfo.builder( + BlobId.of(composeRequest.target().bucket(), sourceBlob.name(), sourceBlob.generation())) + .build().toPb()); } final StorageObject target = composeRequest.target().toPb(); final Map targetOptions = optionMap(composeRequest.target().generation(), @@ -446,21 +398,22 @@ public StorageObject call() { } @Override - public BlobInfo copy(CopyRequest copyRequest) { + public CopyWriter copy(final CopyRequest copyRequest) { final StorageObject source = copyRequest.source().toPb(); - copyRequest.sourceOptions(); final Map sourceOptions = - optionMap(null, null, copyRequest.sourceOptions(), true); + optionMap(copyRequest.source().generation(), null, copyRequest.sourceOptions(), true); final StorageObject target = copyRequest.target().toPb(); final Map targetOptions = optionMap(copyRequest.target().generation(), copyRequest.target().metageneration(), copyRequest.targetOptions()); try { - return BlobInfo.fromPb(runWithRetries(new Callable() { + RewriteResponse rewriteResponse = runWithRetries(new Callable() { @Override - public StorageObject call() { - return storageRpc.copy(source, sourceOptions, target, targetOptions); + public RewriteResponse call() { + return storageRpc.openRewrite(new StorageRpc.RewriteRequest(source, sourceOptions, target, + targetOptions, copyRequest.megabytesCopiedPerChunk())); } - }, options().retryParams(), EXCEPTION_HANDLER)); + }, options().retryParams(), EXCEPTION_HANDLER); + return new CopyWriter(options(), rewriteResponse); } catch (RetryHelperException e) { throw StorageException.translateAndThrow(e); } @@ -474,7 +427,7 @@ public byte[] readAllBytes(String bucket, String blob, BlobSourceOption... optio @Override public byte[] readAllBytes(BlobId blob, BlobSourceOption... 
options) { final StorageObject storageObject = blob.toPb(); - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(blob, options); try { return runWithRetries(new Callable() { @Override @@ -488,18 +441,19 @@ public byte[] call() { } @Override - public BatchResponse apply(BatchRequest batchRequest) { + public BatchResponse submit(BatchRequest batchRequest) { List>> toDelete = Lists.newArrayListWithCapacity(batchRequest.toDelete().size()); for (Map.Entry> entry : batchRequest.toDelete().entrySet()) { BlobId blob = entry.getKey(); - Map optionsMap = optionMap(null, null, entry.getValue()); + Map optionsMap = optionMap(blob.generation(), null, entry.getValue()); StorageObject storageObject = blob.toPb(); toDelete.add(Tuple.>of(storageObject, optionsMap)); } List>> toUpdate = Lists.newArrayListWithCapacity(batchRequest.toUpdate().size()); - for (Map.Entry> entry : batchRequest.toUpdate().entrySet()) { + for (Map.Entry> entry : + batchRequest.toUpdate().entrySet()) { BlobInfo blobInfo = entry.getKey(); Map optionsMap = optionMap(blobInfo.generation(), blobInfo.metageneration(), entry.getValue()); @@ -507,9 +461,9 @@ public BatchResponse apply(BatchRequest batchRequest) { } List>> toGet = Lists.newArrayListWithCapacity(batchRequest.toGet().size()); - for (Map.Entry> entry : batchRequest.toGet().entrySet()) { + for (Map.Entry> entry : batchRequest.toGet().entrySet()) { BlobId blob = entry.getKey(); - Map optionsMap = optionMap(null, null, entry.getValue()); + Map optionsMap = optionMap(blob.generation(), null, entry.getValue()); toGet.add(Tuple.>of(blob.toPb(), optionsMap)); } StorageRpc.BatchResponse response = @@ -519,43 +473,38 @@ public BatchResponse apply(BatchRequest batchRequest) { List> updates = transformBatchResult( toUpdate, response.updates, BlobInfo.FROM_PB_FUNCTION); List> gets = transformBatchResult( - toGet, response.gets, BlobInfo.FROM_PB_FUNCTION, HTTP_NOT_FOUND); + toGet, response.gets, BlobInfo.FROM_PB_FUNCTION); return new BatchResponse(deletes, updates, gets); } private List> transformBatchResult( Iterable>> request, - Map> results, Function transform, - int... nullOnErrorCodes) { - Set nullOnErrorCodesSet = Sets.newHashSet(Ints.asList(nullOnErrorCodes)); + Map> results, Function transform) { List> response = Lists.newArrayListWithCapacity(results.size()); for (Tuple tuple : request) { Tuple result = results.get(tuple.x()); - if (result.x() != null) { - response.add(BatchResponse.Result.of(transform.apply(result.x()))); + I object = result.x(); + StorageException exception = result.y(); + if (exception != null) { + response.add(new BatchResponse.Result(exception)); } else { - StorageException exception = result.y(); - if (nullOnErrorCodesSet.contains(exception.code())) { - //noinspection unchecked - response.add(BatchResponse.Result.empty()); - } else { - response.add(new BatchResponse.Result(exception)); - } + response.add(object != null + ? BatchResponse.Result.of(transform.apply(object)) : BatchResponse.Result.empty()); } } return response; } @Override - public BlobReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { Map optionsMap = optionMap(options); - return new BlobReadChannelImpl(options(), BlobId.of(bucket, blob), optionsMap); + return new BlobReadChannel(options(), BlobId.of(bucket, blob), optionsMap); } @Override - public BlobReadChannel reader(BlobId blob, BlobSourceOption... 
options) { - Map optionsMap = optionMap(options); - return new BlobReadChannelImpl(options(), blob, optionsMap); + public ReadChannel reader(BlobId blob, BlobSourceOption... options) { + Map optionsMap = optionMap(blob, options); + return new BlobReadChannel(options(), blob, optionsMap); } @Override @@ -566,25 +515,26 @@ public BlobWriteChannel writer(BlobInfo blobInfo, BlobWriteOption... options) { private BlobWriteChannel writer(BlobInfo blobInfo, BlobTargetOption... options) { final Map optionsMap = optionMap(blobInfo, options); - return new BlobWriteChannelImpl(options(), blobInfo, optionsMap); + return new BlobWriteChannel(options(), blobInfo, optionsMap); } @Override public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { - long expiration = TimeUnit.SECONDS.convert( - options().clock().millis() + unit.toMillis(duration), TimeUnit.MILLISECONDS); EnumMap optionMap = Maps.newEnumMap(SignUrlOption.Option.class); for (SignUrlOption option : options) { optionMap.put(option.option(), option.value()); } - ServiceAccountAuthCredentials cred = + ServiceAccountAuthCredentials authCred = (ServiceAccountAuthCredentials) optionMap.get(SignUrlOption.Option.SERVICE_ACCOUNT_CRED); - if (cred == null) { - checkArgument(options().authCredentials() instanceof ServiceAccountAuthCredentials, + ServiceAccountCredentials cred = authCred != null ? authCred.credentials() : null; + if (authCred == null) { + checkArgument( + this.options().authCredentials() != null + && this.options().authCredentials().credentials() instanceof ServiceAccountCredentials, "Signing key was not provided and could not be derived"); - cred = (ServiceAccountAuthCredentials) this.options().authCredentials(); + cred = (ServiceAccountCredentials) this.options().authCredentials().credentials(); } - // construct signature data - see https://cloud.google.com/storage/docs/access-control#Signed-URLs + // construct signature - see https://cloud.google.com/storage/docs/access-control#Signed-URLs StringBuilder stBuilder = new StringBuilder(); if (optionMap.containsKey(SignUrlOption.Option.HTTP_METHOD)) { stBuilder.append(optionMap.get(SignUrlOption.Option.HTTP_METHOD)); @@ -602,6 +552,8 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio stBuilder.append(blobInfo.contentType()); } stBuilder.append('\n'); + long expiration = TimeUnit.SECONDS.convert( + options().clock().millis() + unit.toMillis(duration), TimeUnit.MILLISECONDS); stBuilder.append(expiration).append('\n'); StringBuilder path = new StringBuilder(); if (!blobInfo.bucket().startsWith("/")) { @@ -618,12 +570,12 @@ public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOptio stBuilder.append(path); try { Signature signer = Signature.getInstance("SHA256withRSA"); - signer.initSign(cred.privateKey()); + signer.initSign(cred.getPrivateKey()); signer.update(stBuilder.toString().getBytes(UTF_8)); + stBuilder = new StringBuilder("https://storage.googleapis.com").append(path); String signature = URLEncoder.encode(BaseEncoding.base64().encode(signer.sign()), UTF_8.name()); - stBuilder = new StringBuilder("https://storage.googleapis.com").append(path); - stBuilder.append("?GoogleAccessId=").append(cred.account()); + stBuilder.append("?GoogleAccessId=").append(cred.getClientEmail()); stBuilder.append("&Expires=").append(expiration); stBuilder.append("&Signature=").append(signature); return new URL(stBuilder.toString()); @@ -640,7 +592,7 @@ public List get(BlobId... 
blobIds) { for (BlobId blob : blobIds) { requestBuilder.get(blob); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.gets(), null)); } @@ -650,7 +602,7 @@ public List update(BlobInfo... blobInfos) { for (BlobInfo blobInfo : blobInfos) { requestBuilder.update(blobInfo); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.updates(), null)); } @@ -660,7 +612,7 @@ public List delete(BlobId... blobIds) { for (BlobId blob : blobIds) { requestBuilder.delete(blob); } - BatchResponse response = apply(requestBuilder.build()); + BatchResponse response = submit(requestBuilder.build()); return Collections.unmodifiableList(transformResultList(response.deletes(), Boolean.FALSE)); } @@ -668,12 +620,29 @@ private static List transformResultList( List> results, final T errorValue) { return Lists.transform(results, new Function, T>() { @Override - public T apply(BatchResponse.Result f) { - return f.failed() ? errorValue : f.get(); + public T apply(BatchResponse.Result result) { + return result.failed() ? errorValue : result.get(); } }); } + private static void addToOptionMap(StorageRpc.Option option, T defaultValue, + Map map) { + addToOptionMap(option, option, defaultValue, map); + } + + private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.Option putOption, + T defaultValue, Map map) { + if (map.containsKey(getOption)) { + @SuppressWarnings("unchecked") + T value = (T) map.remove(getOption); + checkArgument(value != null || defaultValue != null, + "Option " + getOption.value() + " is missing a value"); + value = firstNonNull(value, defaultValue); + map.put(putOption, value); + } + } + private Map optionMap(Long generation, Long metaGeneration, Iterable options) { return optionMap(generation, metaGeneration, options, false); @@ -705,23 +674,6 @@ public T apply(BatchResponse.Result f) { return ImmutableMap.copyOf(temp); } - private static void addToOptionMap(StorageRpc.Option option, T defaultValue, - Map map) { - addToOptionMap(option, option, defaultValue, map); - } - - private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.Option putOption, - T defaultValue, Map map) { - if (map.containsKey(getOption)) { - @SuppressWarnings("unchecked") - T value = (T) map.remove(getOption); - checkArgument(value != null || defaultValue != null, - "Option " + getOption.value() + " is missing a value"); - value = firstNonNull(value, defaultValue); - map.put(putOption, value); - } - } - private Map optionMap(Option... options) { return optionMap(null, null, Arrays.asList(options)); } @@ -738,4 +690,8 @@ private static void addToOptionMap(StorageRpc.Option getOption, StorageRpc.O private Map optionMap(BlobInfo blobInfo, Option... options) { return optionMap(blobInfo.generation(), blobInfo.metageneration(), options); } + + private Map optionMap(BlobId blobId, Option... 
options) { + return optionMap(blobId.generation(), null, options); + } } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java index a439e3c8ae49..bd30cb173366 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/StorageOptions.java @@ -26,7 +26,7 @@ import java.util.Objects; import java.util.Set; -public class StorageOptions extends ServiceOptions { +public class StorageOptions extends ServiceOptions { private static final long serialVersionUID = -7804860602287801084L; private static final String GCS_SCOPE = "https://www.googleapis.com/auth/devstorage.full_control"; @@ -34,10 +34,29 @@ public class StorageOptions extends ServiceOptions { private static final String DEFAULT_PATH_DELIMITER = "/"; private final String pathDelimiter; - private transient StorageRpc storageRpc; + + public static class DefaultStorageFactory implements StorageFactory { + + private static final StorageFactory INSTANCE = new DefaultStorageFactory(); + + @Override + public Storage create(StorageOptions options) { + return new StorageImpl(options); + } + } + + public static class DefaultStorageRpcFactory implements StorageRpcFactory { + + private static final StorageRpcFactory INSTANCE = new DefaultStorageRpcFactory(); + + @Override + public StorageRpc create(StorageOptions options) { + return new DefaultStorageRpc(options); + } + } public static class Builder extends - ServiceOptions.Builder { + ServiceOptions.Builder { private String pathDelimiter; @@ -45,13 +64,14 @@ private Builder() {} private Builder(StorageOptions options) { super(options); + pathDelimiter = options.pathDelimiter; } /** * Sets the path delimiter for the storage service. * * @param pathDelimiter the path delimiter to set - * @return the builder. + * @return the builder */ public Builder pathDelimiter(String pathDelimiter) { this.pathDelimiter = pathDelimiter; @@ -65,28 +85,25 @@ public StorageOptions build() { } private StorageOptions(Builder builder) { - super(builder); + super(StorageFactory.class, StorageRpcFactory.class, builder); pathDelimiter = MoreObjects.firstNonNull(builder.pathDelimiter, DEFAULT_PATH_DELIMITER); } + @SuppressWarnings("unchecked") @Override - protected Set scopes() { - return SCOPES; + protected StorageFactory defaultServiceFactory() { + return DefaultStorageFactory.INSTANCE; } - StorageRpc storageRpc() { - if (storageRpc != null) { - return storageRpc; - } - if (serviceRpcFactory() != null) { - storageRpc = serviceRpcFactory().create(this); - } else { - storageRpc = createRpc(this, StorageRpcFactory.class); - if (storageRpc == null) { - storageRpc = new DefaultStorageRpc(this); - } - } - return storageRpc; + @SuppressWarnings("unchecked") + @Override + protected StorageRpcFactory defaultRpcFactory() { + return DefaultStorageRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return SCOPES; } /** @@ -96,6 +113,14 @@ public String pathDelimiter() { return pathDelimiter; } + /** + * Returns a default {@code StorageOptions} instance. 
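To make the new options plumbing concrete, here is a small sketch of the two ways this diff supports obtaining a service instance; the project ID is a placeholder, and `defaultInstance()` relies on the environment for project ID and credentials:

```java
// Shortest path: default project ID and credentials from the environment.
Storage storage = StorageOptions.defaultInstance().service();

// Explicit configuration via the builder shown in this diff.
Storage configured = StorageOptions.builder()
    .projectId("my-project") // placeholder
    .pathDelimiter("/")
    .build()
    .service();
```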
+ */ + public static StorageOptions defaultInstance() { + return builder().build(); + } + + @SuppressWarnings("unchecked") @Override public Builder toBuilder() { return new Builder(this); @@ -115,10 +140,6 @@ public boolean equals(Object obj) { return baseEquals(other) && Objects.equals(pathDelimiter, other.pathDelimiter); } - public static StorageOptions defaultInstance() { - return builder().build(); - } - public static Builder builder() { return new Builder(); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java index b4a701fde840..fda14ea2e808 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/package-info.java @@ -17,12 +17,11 @@ /** * A client to Google Cloud Storage. * - *

<p>A simple usage example:
- * <pre>{@code
- * StorageOptions options = StorageOptions.builder().projectId("project").build();
- * Storage storage = StorageFactory.instance().get(options);
+ *
+ * <p>Here's a simple usage example for using gcloud-java from App/Compute Engine:
+ * <pre> {@code
+ * Storage storage = StorageOptions.defaultInstance().service();
        * BlobId blobId = BlobId.of("bucket", "blob_name");
      - * Blob blob = Blob.load(storage, blobId);
      + * Blob blob = Blob.get(storage, blobId);
        * if (blob == null) {
        *   BlobInfo blobInfo = BlobInfo.builder(blobId).contentType("text/plain").build();
        *   storage.create(blobInfo, "Hello, Cloud Storage!".getBytes(UTF_8));
      @@ -35,6 +34,11 @@
        *   channel.close();
        * }}
      * + *

      When using gcloud-java from outside of App/Compute Engine, you have to specify a + * project ID and + * provide + * credentials. * @see Google Cloud Storage */ package com.google.gcloud.storage; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java index 1e154e3f8eea..024aa04eba1b 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/RemoteGcsHelper.java @@ -17,16 +17,14 @@ package com.google.gcloud.storage.testing; import com.google.gcloud.AuthCredentials; -import com.google.gcloud.storage.BlobInfo; import com.google.gcloud.RetryParams; +import com.google.gcloud.storage.BlobInfo; import com.google.gcloud.storage.Storage; import com.google.gcloud.storage.StorageException; import com.google.gcloud.storage.StorageOptions; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.InputStream; import java.io.IOException; +import java.io.InputStream; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -39,14 +37,19 @@ import java.util.logging.Logger; /** - * Utility to create a remote storage configuration for testing + * Utility to create a remote storage configuration for testing. Storage options can be obtained via + * the {@link #options()} method. Returned options have custom {@link StorageOptions#retryParams()}: + * {@link RetryParams#retryMaxAttempts()} is {@code 10}, {@link RetryParams#retryMinAttempts()} is + * {@code 6}, {@link RetryParams#maxRetryDelayMillis()} is {@code 30000}, + * {@link RetryParams#totalRetryPeriodMillis()} is {@code 120000} and + * {@link RetryParams#initialRetryDelayMillis()} is {@code 250}. + * {@link StorageOptions#connectTimeout()} and {@link StorageOptions#readTimeout()} are both set + * to {@code 60000}. */ public class RemoteGcsHelper { private static final Logger log = Logger.getLogger(RemoteGcsHelper.class.getName()); private static final String BUCKET_NAME_PREFIX = "gcloud-test-bucket-temp-"; - private static final String PROJECT_ID_ENV_VAR = "GCLOUD_TESTS_PROJECT_ID"; - private static final String PRIVATE_KEY_ENV_VAR = "GCLOUD_TESTS_KEY"; private final StorageOptions options; private RemoteGcsHelper(StorageOptions options) { @@ -62,13 +65,16 @@ public StorageOptions options() { /** * Deletes a bucket, even if non-empty. Objects in the bucket are listed and deleted until bucket - * deletion succeeds or {@code timeout} expires. + * deletion succeeds or {@code timeout} expires. To allow for the timeout, this method uses a + * separate thread to send the delete requests. Use + * {@link #forceDelete(Storage storage, String bucket)} if spawning an additional thread is + * undesirable, such as in the App Engine production runtime. * * @param storage the storage service to be used to issue requests * @param bucket the bucket to be deleted * @param timeout the maximum time to wait * @param unit the time unit of the timeout argument - * @return true if deletion succeeded, false if timeout expired. 
+ * @return true if deletion succeeded, false if timeout expired * @throws InterruptedException if the thread deleting the bucket is interrupted while waiting * @throws ExecutionException if an exception was thrown while deleting bucket or bucket objects */ @@ -85,6 +91,17 @@ public static Boolean forceDelete(Storage storage, String bucket, long timeout, } } + /** + * Deletes a bucket, even if non-empty. This method blocks until the deletion completes or fails. + * + * @param storage the storage service to be used to issue requests + * @param bucket the bucket to be deleted + * @throws StorageException if an exception is encountered during bucket deletion + */ + public static void forceDelete(Storage storage, String bucket) { + new DeleteBucketTask(storage, bucket).call(); + } + /** * Returns a bucket name generated using a random UUID. */ @@ -97,7 +114,7 @@ public static String generateBucketName() { * * @param projectId id of the project to be used for running the tests * @param keyStream input stream for a JSON key - * @return A {@code RemoteGcsHelper} object for the provided options. + * @return A {@code RemoteGcsHelper} object for the provided options * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if * {@code keyStream} is not a valid JSON key stream */ @@ -107,13 +124,7 @@ public static RemoteGcsHelper create(String projectId, InputStream keyStream) StorageOptions storageOptions = StorageOptions.builder() .authCredentials(AuthCredentials.createForJson(keyStream)) .projectId(projectId) - .retryParams(RetryParams.builder() - .retryMaxAttempts(10) - .retryMinAttempts(6) - .maxRetryDelayMillis(30000) - .totalRetryPeriodMillis(120000) - .initialRetryDelayMillis(250) - .build()) + .retryParams(retryParams()) .connectTimeout(60000) .readTimeout(60000) .build(); @@ -127,59 +138,26 @@ public static RemoteGcsHelper create(String projectId, InputStream keyStream) } /** - * Creates a {@code RemoteGcsHelper} object for the given project id and JSON key path. - * - * @param projectId id of the project to be used for running the tests - * @param keyPath path to the JSON key to be used for running the tests - * @return A {@code RemoteGcsHelper} object for the provided options. - * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if the file - * pointed by {@code keyPath} does not exist + * Creates a {@code RemoteGcsHelper} object using default project id and authentication + * credentials. */ - public static RemoteGcsHelper create(String projectId, String keyPath) - throws GcsHelperException { - try { - InputStream keyFileStream = new FileInputStream(keyPath); - return create(projectId, keyFileStream); - } catch (FileNotFoundException ex) { - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, ex.getMessage()); - } - throw GcsHelperException.translate(ex); - } catch (IOException ex) { - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, ex.getMessage()); - } - throw GcsHelperException.translate(ex); - } + public static RemoteGcsHelper create() throws GcsHelperException { + StorageOptions storageOptions = StorageOptions.builder() + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteGcsHelper(storageOptions); } - /** - * Creates a {@code RemoteGcsHelper} object. Project id and path to JSON key are read from two - * environment variables: {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY}. 
- * - * @return A {@code RemoteGcsHelper} object for the provided options. - * @throws com.google.gcloud.storage.testing.RemoteGcsHelper.GcsHelperException if environment - * variables {@code GCLOUD_TESTS_PROJECT_ID} and {@code GCLOUD_TESTS_KEY} are not set or if - * the file pointed by {@code GCLOUD_TESTS_KEY} does not exist - */ - public static RemoteGcsHelper create() throws GcsHelperException { - String projectId = System.getenv(PROJECT_ID_ENV_VAR); - String keyPath = System.getenv(PRIVATE_KEY_ENV_VAR); - if (projectId == null) { - String message = "Environment variable " + PROJECT_ID_ENV_VAR + " not set"; - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, message); - } - throw new GcsHelperException(message); - } - if (keyPath == null) { - String message = "Environment variable " + PRIVATE_KEY_ENV_VAR + " not set"; - if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, message); - } - throw new GcsHelperException(message); - } - return create(projectId, keyPath); + private static RetryParams retryParams() { + return RetryParams.builder() + .retryMaxAttempts(10) + .retryMinAttempts(6) + .maxRetryDelayMillis(30000) + .totalRetryPeriodMillis(120000) + .initialRetryDelayMillis(250) + .build(); } private static class DeleteBucketTask implements Callable { @@ -193,9 +171,9 @@ public DeleteBucketTask(Storage storage, String bucket) { } @Override - public Boolean call() throws Exception { + public Boolean call() { while (true) { - for (BlobInfo info : storage.list(bucket)) { + for (BlobInfo info : storage.list(bucket).values()) { storage.delete(bucket, info.name()); } try { @@ -203,7 +181,12 @@ public Boolean call() throws Exception { return true; } catch (StorageException e) { if (e.code() == 409) { - Thread.sleep(500); + try { + Thread.sleep(500); + } catch (InterruptedException interruptedException) { + Thread.currentThread().interrupt(); + throw e; + } } else { throw e; } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java index eca45b4b6306..8afdd8a9660d 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/testing/package-info.java @@ -18,10 +18,11 @@ * A testing helper for Google Cloud Storage. * *

<p>A simple usage example:
+ *
 * <p>Before the test:
 * <pre> {@code
- * RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(PROJECT_ID, "/path/to/JSON/key.json");
- * Storage storage = StorageFactory.instance().get(gcsHelper.options());
+ * RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
+ * Storage storage = gcsHelper.options().service();
 * String bucket = RemoteGcsHelper.generateBucketName();
 * storage.create(BucketInfo.of(bucket));
 * }</pre>
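The helper's timeout-based and blocking cleanup methods shown earlier in this diff pair naturally with the setup above. A hedged sketch of the matching teardown, reusing the `storage` and `bucket` variables from the example (the 5-second timeout is arbitrary):

```java
// After the test: delete the bucket even if it still contains blobs.
// (Declares InterruptedException and ExecutionException.)
RemoteGcsHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS);

// Or, where spawning an extra thread is undesirable (e.g. on App Engine),
// the blocking variant added in this diff:
RemoteGcsHelper.forceDelete(storage, bucket);
```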
      diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java index 6a11fb0b2810..1c62805b2a1b 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/AclTest.java @@ -82,8 +82,8 @@ public void testRawEntity() { @Test - public void testAcl() { - Acl acl = new Acl(User.ofAllUsers(), Role.READER); + public void testOf() { + Acl acl = Acl.of(User.ofAllUsers(), Role.READER); assertEquals(User.ofAllUsers(), acl.entity()); assertEquals(Role.READER, acl.role()); ObjectAccessControl objectPb = acl.toObjectPb(); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java deleted file mode 100644 index 4c22edbc35c8..000000000000 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BaseListResultTest.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.gcloud.storage; - -import static org.junit.Assert.assertEquals; - -import com.google.common.collect.ImmutableList; - -import org.junit.Test; - -import java.util.Collections; - -public class BaseListResultTest { - - @Test - public void testListResult() throws Exception { - ImmutableList values = ImmutableList.of("1", "2"); - final BaseListResult nextResult = - new BaseListResult<>(null, "c", Collections.emptyList()); - BaseListResult.NextPageFetcher fetcher = new BaseListResult.NextPageFetcher() { - - @Override - public BaseListResult nextPage() { - return nextResult; - } - }; - BaseListResult result = new BaseListResult<>(fetcher, "c", values); - assertEquals(nextResult, result.nextPage()); - assertEquals("c", result.nextPageCursor()); - assertEquals(values, ImmutableList.copyOf(result.iterator())); - - } -} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java index 96b73c871468..63972ff85dfd 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchRequestTest.java @@ -19,9 +19,11 @@ import static com.google.gcloud.storage.Storage.PredefinedAcl.PUBLIC_READ; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import com.google.common.collect.Iterables; +import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BlobSourceOption; import com.google.gcloud.storage.Storage.BlobTargetOption; @@ -35,24 +37,28 @@ public class BatchRequestTest { @Test public void testBatchRequest() { BatchRequest request = 
BatchRequest.builder() - .delete("b1", "o1") + .delete(BlobId.of("b1", "o1", 1L), BlobSourceOption.generationMatch()) .delete("b1", "o2", BlobSourceOption.generationMatch(1), BlobSourceOption.metagenerationMatch(2)) .update(BlobInfo.builder("b2", "o1").build(), BlobTargetOption.predefinedAcl(PUBLIC_READ)) .update(BlobInfo.builder("b2", "o2").build()) - .get("b3", "o1") - .get("b3", "o2", BlobSourceOption.generationMatch(1)) + .get(BlobId.of("b3", "o1", 1L), BlobGetOption.generationMatch()) + .get("b3", "o2", BlobGetOption.generationMatch(1)) .get("b3", "o3") .build(); Iterator>> deletes = request .toDelete().entrySet().iterator(); Entry> delete = deletes.next(); - assertEquals(BlobId.of("b1", "o1"), delete.getKey()); - assertTrue(Iterables.isEmpty(delete.getValue())); + assertEquals(BlobId.of("b1", "o1", 1L), delete.getKey()); + assertEquals(1, Iterables.size(delete.getValue())); + assertEquals(BlobSourceOption.generationMatch(), Iterables.getFirst(delete.getValue(), null)); delete = deletes.next(); assertEquals(BlobId.of("b1", "o2"), delete.getKey()); assertEquals(2, Iterables.size(delete.getValue())); + assertEquals(BlobSourceOption.generationMatch(1L), Iterables.getFirst(delete.getValue(), null)); + assertEquals(BlobSourceOption.metagenerationMatch(2L), + Iterables.get(delete.getValue(), 1, null)); assertFalse(deletes.hasNext()); Iterator>> updates = request @@ -67,19 +73,70 @@ public void testBatchRequest() { assertTrue(Iterables.isEmpty(update.getValue())); assertFalse(updates.hasNext()); - Iterator>> gets = request - .toGet().entrySet().iterator(); - Entry> get = gets.next(); - assertEquals(BlobId.of("b3", "o1"), get.getKey()); - assertTrue(Iterables.isEmpty(get.getValue())); + Iterator>> gets = request.toGet().entrySet().iterator(); + Entry> get = gets.next(); + assertEquals(BlobId.of("b3", "o1", 1L), get.getKey()); + assertEquals(1, Iterables.size(get.getValue())); + assertEquals(BlobGetOption.generationMatch(), Iterables.getFirst(get.getValue(), null)); get = gets.next(); assertEquals(BlobId.of("b3", "o2"), get.getKey()); assertEquals(1, Iterables.size(get.getValue())); - assertEquals(BlobSourceOption.generationMatch(1), - Iterables.getFirst(get.getValue(), null)); + assertEquals(BlobGetOption.generationMatch(1), Iterables.getFirst(get.getValue(), null)); get = gets.next(); assertEquals(BlobId.of("b3", "o3"), get.getKey()); assertTrue(Iterables.isEmpty(get.getValue())); assertFalse(gets.hasNext()); } + + @Test + public void testEquals() { + BatchRequest request = BatchRequest.builder() + .delete("b1", "o1") + .delete("b1", "o2") + .update(BlobInfo.builder("b2", "o1").build()) + .update(BlobInfo.builder("b2", "o2").build()) + .get("b3", "o1") + .get("b3", "o2") + .build(); + BatchRequest requestEquals = BatchRequest.builder() + .delete("b1", "o1") + .delete("b1", "o2") + .update(BlobInfo.builder("b2", "o1").build()) + .update(BlobInfo.builder("b2", "o2").build()) + .get("b3", "o1") + .get("b3", "o2") + .build(); + BatchRequest requestNotEquals1 = BatchRequest.builder() + .delete("b1", "o1") + .delete("b1", "o3") + .update(BlobInfo.builder("b2", "o1").build()) + .update(BlobInfo.builder("b2", "o2").build()) + .get("b3", "o1") + .get("b3", "o2") + .build(); + BatchRequest requestNotEquals2 = BatchRequest.builder() + .delete("b1", "o1") + .delete("b1", "o2") + .update(BlobInfo.builder("b2", "o1").build()) + .update(BlobInfo.builder("b2", "o3").build()) + .get("b3", "o1") + .get("b3", "o2") + .build(); + BatchRequest requestNotEquals3 = BatchRequest.builder() + .delete("b1", "o1") + 
.delete("b1", "o2") + .update(BlobInfo.builder("b2", "o1").build()) + .update(BlobInfo.builder("b2", "o2").build()) + .get("b3", "o1") + .get("b3", "o3") + .build(); + assertEquals(request, requestEquals); + assertEquals(request.hashCode(), requestEquals.hashCode()); + assertNotEquals(request, requestNotEquals1); + assertNotEquals(request.hashCode(), requestNotEquals1.hashCode()); + assertNotEquals(request, requestNotEquals2); + assertNotEquals(request.hashCode(), requestNotEquals2.hashCode()); + assertNotEquals(request, requestNotEquals3); + assertNotEquals(request.hashCode(), requestNotEquals3.hashCode()); + } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java index 59c1da91b3fd..5985329e0183 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BatchResponseTest.java @@ -17,6 +17,7 @@ package com.google.gcloud.storage; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import com.google.common.collect.ImmutableList; import com.google.gcloud.storage.BatchResponse.Result; @@ -34,12 +35,38 @@ public class BatchResponseTest { @Test public void testBatchResponse() { List> deletes = ImmutableList.of(Result.of(true), Result.of(false)); - List> updates = ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); + List> updates = + ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); List> gets = ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3)); BatchResponse response = new BatchResponse(deletes, updates, gets); - assertEquals(deletes, response.deletes()); assertEquals(updates, response.updates()); assertEquals(gets, response.gets()); } + + @Test + public void testEquals() { + List> deletes = ImmutableList.of(Result.of(true), Result.of(false)); + List> updates = + ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); + List> gets = ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3)); + List> otherDeletes = ImmutableList.of(Result.of(false), Result.of(true)); + List> otherUpdates = + ImmutableList.of(Result.of(BLOB_INFO_2), Result.of(BLOB_INFO_3)); + List> otherGets = + ImmutableList.of(Result.of(BLOB_INFO_1), Result.of(BLOB_INFO_2)); + BatchResponse response = new BatchResponse(deletes, updates, gets); + BatchResponse responseEquals = new BatchResponse(deletes, updates, gets); + BatchResponse responseNotEquals1 = new BatchResponse(otherDeletes, updates, gets); + BatchResponse responseNotEquals2 = new BatchResponse(deletes, otherUpdates, gets); + BatchResponse responseNotEquals3 = new BatchResponse(deletes, updates, otherGets); + assertEquals(response, responseEquals); + assertEquals(response.hashCode(), responseEquals.hashCode()); + assertNotEquals(response, responseNotEquals1); + assertNotEquals(response.hashCode(), responseNotEquals1.hashCode()); + assertNotEquals(response, responseNotEquals2); + assertNotEquals(response.hashCode(), responseNotEquals2.hashCode()); + assertNotEquals(response, responseNotEquals3); + assertNotEquals(response.hashCode(), responseNotEquals3.hashCode()); + } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java index 70560b0c9a9e..a1cc01f4287c 100644 --- 
a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobInfoTest.java @@ -34,8 +34,8 @@ public class BlobInfoTest { private static final List ACL = ImmutableList.of( - new Acl(User.ofAllAuthenticatedUsers(), READER), - new Acl(new Project(VIEWERS, "p1"), WRITER)); + Acl.of(User.ofAllAuthenticatedUsers(), READER), + Acl.of(new Project(VIEWERS, "p1"), WRITER)); private static final Integer COMPONENT_COUNT = 2; private static final String CONTENT_TYPE = "text/html"; private static final String CACHE_CONTROL = "cache"; @@ -55,7 +55,7 @@ public class BlobInfoTest { private static final String SELF_LINK = "http://storage/b/n"; private static final Long SIZE = 1024L; private static final Long UPDATE_TIME = DELETE_TIME - 1L; - private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n") + private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n", GENERATION) .acl(ACL) .componentCount(COMPONENT_COUNT) .contentType(CONTENT_TYPE) @@ -66,7 +66,6 @@ public class BlobInfoTest { .crc32c(CRC32) .deleteTime(DELETE_TIME) .etag(ETAG) - .generation(GENERATION) .id(ID) .md5(MD5) .mediaLink(MEDIA_LINK) @@ -85,10 +84,16 @@ public void testToBuilder() { assertEquals("n2", blobInfo.name()); assertEquals("b2", blobInfo.bucket()); assertEquals(Long.valueOf(200), blobInfo.size()); - blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n")).size(SIZE).build(); + blobInfo = blobInfo.toBuilder().blobId(BlobId.of("b", "n", GENERATION)).size(SIZE).build(); compareBlobs(BLOB_INFO, blobInfo); } + @Test + public void testToBuilderIncomplete() { + BlobInfo incompleteBlobInfo = BlobInfo.builder(BlobId.of("b2", "n2")).build(); + compareBlobs(incompleteBlobInfo, incompleteBlobInfo.toBuilder().build()); + } + @Test public void testBuilder() { assertEquals("b", BLOB_INFO.bucket()); @@ -150,6 +155,6 @@ public void testToPbAndFromPb() { @Test public void testBlobId() { - assertEquals(BlobId.of("b", "n"), BLOB_INFO.blobId()); + assertEquals(BlobId.of("b", "n", GENERATION), BLOB_INFO.blobId()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java deleted file mode 100644 index 117e2b692c6b..000000000000 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobListResultTest.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
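(Aside, before the deleted `BlobListResultTest` continues below: the batch hunks above pin operations to specific object generations. Here is a hedged usage sketch of that API; the project, bucket, and object names are invented, and `submit` replacing the old `apply` is taken from the `BucketTest` hunks further down.)

```java
import com.google.gcloud.storage.BatchRequest;
import com.google.gcloud.storage.BatchResponse;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class BatchSketch {
  public static void main(String... args) {
    // Hypothetical project ID; any authenticated Storage instance would do.
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    BatchRequest batch = BatchRequest.builder()
        // Pin the delete to generation 1 of the object, as in testBatchRequest above.
        .delete(BlobId.of("my-bucket", "stale-object", 1L),
            Storage.BlobSourceOption.generationMatch())
        // Reads now take BlobGetOption rather than BlobSourceOption.
        .get(BlobId.of("my-bucket", "report.csv", 7L), Storage.BlobGetOption.generationMatch())
        .build();
    BatchResponse response = storage.submit(batch); // submit() is the renamed apply()
    System.out.println("delete results: " + response.deletes());
  }
}
```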
- */ - -package com.google.gcloud.storage; - -import static org.easymock.EasyMock.createStrictMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - -import com.google.common.collect.ImmutableList; -import java.util.Iterator; - -import org.junit.Before; -import org.junit.Test; - -public class BlobListResultTest { - - private static final Iterable FIRST_PAGE_RESULTS = ImmutableList.of( - BlobInfo.builder("b1", "n1").build(), - BlobInfo.builder("b2", "n2").build()); - - private static final Iterable SECOND_PAGE_RESULTS = ImmutableList.of( - BlobInfo.builder("b1", "n1").build(), - BlobInfo.builder("b2", "n2").build()); - - private BaseListResult firstPage; - private BaseListResult secondPage; - private Storage storage; - private BlobListResult blobListResult; - - @Before - public void setUp() throws Exception { - firstPage = createStrictMock(BaseListResult.class); - secondPage = createStrictMock(BaseListResult.class); - storage = createStrictMock(Storage.class); - blobListResult = new BlobListResult(storage, firstPage); - } - - @Test - public void testListResult() throws Exception { - expect(firstPage.iterator()).andReturn(FIRST_PAGE_RESULTS.iterator()); - replay(firstPage); - Iterator firstPageIterator = FIRST_PAGE_RESULTS.iterator(); - Iterator blobListIterator = blobListResult.iterator(); - while (blobListIterator.hasNext() && firstPageIterator.hasNext()) { - assertEquals(firstPageIterator.next(), blobListIterator.next().info()); - } - assertFalse(blobListIterator.hasNext()); - assertFalse(firstPageIterator.hasNext()); - verify(firstPage); - } - - @Test - public void testCursor() throws Exception { - expect(firstPage.nextPageCursor()).andReturn("c"); - replay(firstPage); - assertEquals("c", blobListResult.nextPageCursor()); - verify(firstPage); - } - - @Test - public void testNextPage() throws Exception { - expect(firstPage.nextPage()).andReturn(secondPage); - expect(secondPage.iterator()).andReturn(SECOND_PAGE_RESULTS.iterator()); - replay(firstPage); - replay(secondPage); - ListResult nextPageResult = blobListResult.nextPage(); - Iterator secondPageIterator = SECOND_PAGE_RESULTS.iterator(); - Iterator blobListIterator = nextPageResult.iterator(); - while (blobListIterator.hasNext() && secondPageIterator.hasNext()) { - assertEquals(secondPageIterator.next(), blobListIterator.next().info()); - } - assertFalse(blobListIterator.hasNext()); - assertFalse(secondPageIterator.hasNext()); - verify(firstPage); - verify(secondPage); - } -} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java similarity index 51% rename from gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java rename to gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java index e8ed915581b8..ffb37e8c5032 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobReadChannelTest.java @@ -16,76 +16,82 @@ package com.google.gcloud.storage; +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; import static 
org.easymock.EasyMock.verify; import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.common.collect.ImmutableMap; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpcFactory; -import org.easymock.EasyMock; -import org.junit.Test; +import org.junit.After; import org.junit.Before; +import org.junit.Test; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Map; import java.util.Random; -import org.junit.After; -public class BlobReadChannelImplTest { +public class BlobReadChannelTest { private static final String BUCKET_NAME = "b"; private static final String BLOB_NAME = "n"; - private static final BlobId BLOB_ID = BlobId.of(BUCKET_NAME, BLOB_NAME); + private static final BlobId BLOB_ID = BlobId.of(BUCKET_NAME, BLOB_NAME, -1L); private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); private static final int DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; private static final int CUSTOM_CHUNK_SIZE = 2 * 1024 * 1024; private static final Random RANDOM = new Random(); - private StorageOptions optionsMock; + private StorageOptions options; + private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; - private BlobReadChannelImpl reader; + private BlobReadChannel reader; @Before - public void setUp() throws IOException, InterruptedException { - optionsMock = EasyMock.createMock(StorageOptions.class); - storageRpcMock = EasyMock.createMock(StorageRpc.class); + public void setUp() { + rpcFactoryMock = createMock(StorageRpcFactory.class); + storageRpcMock = createMock(StorageRpc.class); + expect(rpcFactoryMock.create(anyObject(StorageOptions.class))).andReturn(storageRpcMock); + replay(rpcFactoryMock); + options = StorageOptions.builder() + .projectId("projectId") + .serviceRpcFactory(rpcFactoryMock) + .retryParams(RetryParams.noRetries()) + .build(); } @After public void tearDown() throws Exception { - verify(optionsMock); - verify(storageRpcMock); + verify(rpcFactoryMock, storageRpcMock); } @Test public void testCreate() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.replay(optionsMock); - EasyMock.replay(storageRpcMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + replay(storageRpcMock); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); assertTrue(reader.isOpen()); } @Test public void testReadBuffered() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer firstReadBuffer = ByteBuffer.allocate(42); ByteBuffer secondReadBuffer = ByteBuffer.allocate(42); - EasyMock - .expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(result); - EasyMock.replay(storageRpcMock); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", result)); + replay(storageRpcMock); 
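(Aside: the mocked reads in this test exercise the chunked download path. A minimal sketch of the same pattern against a live service, with invented names; `read()` returning -1 at end of blob is asserted by `testReadFinish` below.)

```java
import com.google.gcloud.ReadChannel;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;

public class BufferedReadSketch {
  public static void main(String... args) throws IOException {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    // reader() now returns the service-agnostic ReadChannel rather than BlobReadChannel.
    ReadChannel reader = storage.reader(BlobId.of("my-bucket", "my-object"));
    ByteBuffer buffer = ByteBuffer.allocate(64 * 1024); // any capacity; chunking is internal
    while (reader.read(buffer) != -1) { // -1 once the blob is exhausted
      buffer.flip();
      // ... consume buffer contents ...
      buffer.clear();
    }
    reader.close();
  }
}
```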
reader.read(firstReadBuffer); reader.read(secondReadBuffer); assertArrayEquals(Arrays.copyOf(result, firstReadBuffer.capacity()), firstReadBuffer.array()); @@ -97,24 +103,18 @@ public void testReadBuffered() throws IOException { @Test public void testReadBig() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()).times(2); - EasyMock.replay(optionsMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.chunkSize(CUSTOM_CHUNK_SIZE); byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); ByteBuffer secondReadBuffer = ByteBuffer.allocate(42); - EasyMock - .expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(firstResult); - EasyMock - .expect( - storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, - CUSTOM_CHUNK_SIZE)) - .andReturn(secondResult); - EasyMock.replay(storageRpcMock); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", firstResult)); + expect(storageRpcMock.read( + BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, CUSTOM_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", secondResult)); + replay(storageRpcMock); reader.read(firstReadBuffer); reader.read(secondReadBuffer); assertArrayEquals(firstResult, firstReadBuffer.array()); @@ -124,42 +124,32 @@ public void testReadBig() throws IOException { @Test public void testReadFinish() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); byte[] result = {}; ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); - EasyMock - .expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(result); - EasyMock.replay(storageRpcMock); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", result)); + replay(storageRpcMock); assertEquals(-1, reader.read(readBuffer)); } @Test public void testSeek() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.seek(42); byte[] result = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); - EasyMock - .expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) - .andReturn(result); - EasyMock.replay(storageRpcMock); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", result)); + replay(storageRpcMock); reader.read(readBuffer); assertArrayEquals(result, readBuffer.array()); } @Test - public void testClose() throws IOException { - 
EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.replay(optionsMock); - EasyMock.replay(storageRpcMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + public void testClose() { + replay(storageRpcMock); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); assertTrue(reader.isOpen()); reader.close(); assertTrue(!reader.isOpen()); @@ -167,10 +157,8 @@ public void testClose() throws IOException { @Test public void testReadClosed() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.replay(optionsMock); - EasyMock.replay(storageRpcMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + replay(storageRpcMock); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.close(); try { ByteBuffer readBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); @@ -182,25 +170,45 @@ public void testReadClosed() { } @Test - public void testSaveAndRestore() throws IOException, ClassNotFoundException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()).times(2); - EasyMock.replay(optionsMock); + public void testReadGenerationChanged() throws IOException { + BlobId blobId = BlobId.of(BUCKET_NAME, BLOB_NAME); + reader = new BlobReadChannel(options, blobId, EMPTY_RPC_OPTIONS); + byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); + byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); + ByteBuffer firstReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); + ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); + expect(storageRpcMock.read(blobId.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag1", firstResult)); + expect( + storageRpcMock.read(blobId.toPb(), EMPTY_RPC_OPTIONS, DEFAULT_CHUNK_SIZE, + DEFAULT_CHUNK_SIZE)).andReturn(StorageRpc.Tuple.of("etag2", secondResult)); + replay(storageRpcMock); + reader.read(firstReadBuffer); + try { + reader.read(secondReadBuffer); + fail("Expected ReadChannel read to throw StorageException"); + } catch (StorageException ex) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blobId).append(" was updated while reading"); + assertEquals(messageBuilder.toString(), ex.getMessage()); + } + } + + @Test + public void testSaveAndRestore() throws IOException { byte[] firstResult = randomByteArray(DEFAULT_CHUNK_SIZE); byte[] secondResult = randomByteArray(DEFAULT_CHUNK_SIZE); ByteBuffer firstReadBuffer = ByteBuffer.allocate(42); ByteBuffer secondReadBuffer = ByteBuffer.allocate(DEFAULT_CHUNK_SIZE); - EasyMock - .expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) - .andReturn(firstResult); - EasyMock - .expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) - .andReturn(secondResult); - EasyMock.replay(storageRpcMock); - reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 0, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", firstResult)); + expect(storageRpcMock.read(BLOB_ID.toPb(), EMPTY_RPC_OPTIONS, 42, DEFAULT_CHUNK_SIZE)) + .andReturn(StorageRpc.Tuple.of("etag", secondResult)); + replay(storageRpcMock); + reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS); reader.read(firstReadBuffer); - RestorableState readerState = reader.save(); 
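(Aside: the hunk in progress here renames `save()` to `capture()` and widens the state type to `RestorableState<ReadChannel>`. A hedged sketch of the capture-then-restore flow, with invented names; serializing the state for later use is an assumption, not something these tests show.)

```java
import com.google.gcloud.ReadChannel;
import com.google.gcloud.RestorableState;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;

public class ResumeReadSketch {
  public static void main(String... args) throws IOException {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    ReadChannel reader = storage.reader(BlobId.of("my-bucket", "my-object"));
    ByteBuffer chunk = ByteBuffer.allocate(64 * 1024);
    reader.read(chunk);
    // capture() replaces the old save(); the state records the current read position.
    RestorableState<ReadChannel> state = reader.capture();
    // ... the state could be persisted and picked up later (assumed Serializable) ...
    ReadChannel resumed = state.restore();
    chunk.clear();
    resumed.read(chunk); // continues where the captured channel left off
    resumed.close();
  }
}
```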
-    BlobReadChannel restoredReader = readerState.restore();
+    RestorableState<ReadChannel> readerState = reader.capture();
+    ReadChannel restoredReader = readerState.restore();
     restoredReader.read(secondReadBuffer);
     assertArrayEquals(Arrays.copyOf(firstResult, firstReadBuffer.capacity()),
         firstReadBuffer.array());
@@ -209,13 +217,12 @@ public void testSaveAndRestore() throws IOException, ClassNotFoundException {
   @Test
   public void testStateEquals() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
-    EasyMock.replay(optionsMock);
-    EasyMock.replay(storageRpcMock);
-    reader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS);
-    BlobReadChannel secondReader = new BlobReadChannelImpl(optionsMock, BLOB_ID, EMPTY_RPC_OPTIONS);
-    RestorableState<BlobReadChannel> state = reader.save();
-    RestorableState<BlobReadChannel> secondState = secondReader.save();
+    replay(storageRpcMock);
+    reader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
+    @SuppressWarnings("resource") // avoid closing when you don't want partial writes to GCS
+    ReadChannel secondReader = new BlobReadChannel(options, BLOB_ID, EMPTY_RPC_OPTIONS);
+    RestorableState<ReadChannel> state = reader.capture();
+    RestorableState<ReadChannel> secondState = secondReader.capture();
     assertEquals(state, secondState);
     assertEquals(state.hashCode(), secondState.hashCode());
     assertEquals(state.toString(), secondState.toString());
diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java
index dddbb763f04c..586e7fd0fd39 100644
--- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java
+++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobTest.java
@@ -25,16 +25,20 @@
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertSame;
 import static org.junit.Assert.assertTrue;
 import com.google.api.client.util.Lists;
+import com.google.gcloud.ReadChannel;
 import com.google.gcloud.storage.Storage.CopyRequest;
+
 import org.easymock.Capture;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+
 import java.net.URL;
 import java.util.Arrays;
 import java.util.List;
@@ -42,7 +46,7 @@ public class BlobTest {
-  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").build();
+  private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").metageneration(42L).build();
   private static final BlobId[] BLOB_ID_ARRAY = {BlobId.of("b1", "n1"),
       BlobId.of("b2", "n2"), BlobId.of("b3", "n3")};
   private static final BlobInfo[] BLOB_INFO_ARRAY = {BlobInfo.builder("b1", "n1").build(),
@@ -70,14 +74,16 @@ public void testInfo() throws Exception {
   @Test
   public void testExists_True() throws Exception {
-    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(BLOB_INFO);
+    Storage.BlobGetOption[] expectedOptions = {Storage.BlobGetOption.fields()};
+    expect(storage.get(BLOB_INFO.blobId(), expectedOptions)).andReturn(BLOB_INFO);
     replay(storage);
     assertTrue(blob.exists());
   }
 
   @Test
   public void testExists_False() throws Exception {
-    expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(null);
+    Storage.BlobGetOption[] expectedOptions = {Storage.BlobGetOption.fields()};
+    expect(storage.get(BLOB_INFO.blobId(), expectedOptions)).andReturn(null);
     replay(storage);
assertFalse(blob.exists()); } @@ -93,10 +99,28 @@ public void testContent() throws Exception { @Test public void testReload() throws Exception { BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build(); - expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobSourceOption[0])).andReturn(updatedInfo); + expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(updatedInfo); replay(storage); Blob updatedBlob = blob.reload(); - assertSame(storage, blob.storage()); + assertSame(storage, updatedBlob.storage()); + assertEquals(updatedInfo, updatedBlob.info()); + } + + @Test + public void testReloadNull() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null); + replay(storage); + assertNull(blob.reload()); + } + + @Test + public void testReloadWithOptions() throws Exception { + BlobInfo updatedInfo = BLOB_INFO.toBuilder().cacheControl("c").build(); + Storage.BlobGetOption[] options = {Storage.BlobGetOption.metagenerationMatch(42L)}; + expect(storage.get(BLOB_INFO.blobId(), options)).andReturn(updatedInfo); + replay(storage); + Blob updatedBlob = blob.reload(Blob.BlobSourceOption.metagenerationMatch()); + assertSame(storage, updatedBlob.storage()); assertEquals(updatedInfo, updatedBlob.info()); } @@ -119,47 +143,53 @@ public void testDelete() throws Exception { @Test public void testCopyToBucket() throws Exception { - BlobInfo target = BLOB_INFO.toBuilder().blobId(BlobId.of("bt", "n")).build(); + BlobInfo target = BlobInfo.builder(BlobId.of("bt", "n")).build(); + CopyWriter copyWriter = createMock(CopyWriter.class); Capture capturedCopyRequest = Capture.newInstance(); - expect(storage.copy(capture(capturedCopyRequest))).andReturn(target); + expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); replay(storage); - Blob targetBlob = blob.copyTo("bt"); - assertEquals(target, targetBlob.info()); + CopyWriter returnedCopyWriter = blob.copyTo("bt"); + assertEquals(copyWriter, returnedCopyWriter); assertEquals(capturedCopyRequest.getValue().source(), blob.id()); assertEquals(capturedCopyRequest.getValue().target(), target); - assertSame(storage, targetBlob.storage()); + assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); + assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); } @Test public void testCopyTo() throws Exception { - BlobInfo target = BLOB_INFO.toBuilder().blobId(BlobId.of("bt", "nt")).build(); + BlobInfo target = BlobInfo.builder(BlobId.of("bt", "nt")).build(); + CopyWriter copyWriter = createMock(CopyWriter.class); Capture capturedCopyRequest = Capture.newInstance(); - expect(storage.copy(capture(capturedCopyRequest))).andReturn(target); + expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); replay(storage); - Blob targetBlob = blob.copyTo("bt", "nt"); - assertEquals(target, targetBlob.info()); + CopyWriter returnedCopyWriter = blob.copyTo("bt", "nt"); + assertEquals(copyWriter, returnedCopyWriter); assertEquals(capturedCopyRequest.getValue().source(), blob.id()); assertEquals(capturedCopyRequest.getValue().target(), target); - assertSame(storage, targetBlob.storage()); + assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); + assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); } @Test public void testCopyToBlobId() throws Exception { BlobId targetId = BlobId.of("bt", "nt"); - BlobInfo target = BLOB_INFO.toBuilder().blobId(targetId).build(); + CopyWriter copyWriter = createMock(CopyWriter.class); 
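(Aside on the `copyTo` hunks being tested here: copying now returns a `CopyWriter` instead of a finished `Blob`. A hedged sketch with invented names; `CopyWriter.result()` blocking until the server-side copy completes is an assumption, since the writer is mocked out in these tests.)

```java
import com.google.gcloud.storage.Blob;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.CopyWriter;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class CopySketch {
  public static void main(String... args) {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    Blob source = Blob.get(storage, BlobId.of("src-bucket", "src-object")); // null if absent
    // copyTo() builds a Storage.CopyRequest under the hood (its source() and target()
    // accessors are asserted in the tests) and hands back a CopyWriter.
    CopyWriter copyWriter = source.copyTo("dst-bucket", "dst-object");
    // Assumption: result() drives the copy to completion and returns the target's info.
    BlobInfo copied = copyWriter.result();
    System.out.println("copied to " + copied.blobId());
  }
}
```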
+ BlobInfo target = BlobInfo.builder(targetId).build(); Capture capturedCopyRequest = Capture.newInstance(); - expect(storage.copy(capture(capturedCopyRequest))).andReturn(target); + expect(storage.copy(capture(capturedCopyRequest))).andReturn(copyWriter); replay(storage); - Blob targetBlob = blob.copyTo(targetId); - assertEquals(target, targetBlob.info()); + CopyWriter returnedCopyWriter = blob.copyTo(targetId); + assertEquals(copyWriter, returnedCopyWriter); assertEquals(capturedCopyRequest.getValue().source(), blob.id()); assertEquals(capturedCopyRequest.getValue().target(), target); - assertSame(storage, targetBlob.storage()); + assertTrue(capturedCopyRequest.getValue().sourceOptions().isEmpty()); + assertTrue(capturedCopyRequest.getValue().targetOptions().isEmpty()); } @Test public void testReader() throws Exception { - BlobReadChannel channel = createMock(BlobReadChannel.class); + ReadChannel channel = createMock(ReadChannel.class); expect(storage.reader(BLOB_INFO.blobId())).andReturn(channel); replay(storage); assertSame(channel, blob.reader()); @@ -182,17 +212,23 @@ public void testSignUrl() throws Exception { } @Test - public void testGetNone() throws Exception { + public void testGetSome() throws Exception { + List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY); + expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList); replay(storage); - assertTrue(Blob.get(storage).isEmpty()); + List result = Blob.get(storage, BLOB_ID_ARRAY[0], BLOB_ID_ARRAY[1], BLOB_ID_ARRAY[2]); + assertEquals(blobInfoList.size(), result.size()); + for (int i = 0; i < blobInfoList.size(); i++) { + assertEquals(blobInfoList.get(i), result.get(i).info()); + } } @Test - public void testGetSome() throws Exception { + public void testGetSomeList() throws Exception { List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY); expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList); replay(storage); - List result = Blob.get(storage, BLOB_ID_ARRAY); + List result = Blob.get(storage, Arrays.asList(BLOB_ID_ARRAY)); assertEquals(blobInfoList.size(), result.size()); for (int i = 0; i < blobInfoList.size(); i++) { assertEquals(blobInfoList.get(i), result.get(i).info()); @@ -204,7 +240,7 @@ public void testGetSomeNull() throws Exception { List blobInfoList = Arrays.asList(BLOB_INFO_ARRAY[0], null, BLOB_INFO_ARRAY[2]); expect(storage.get(BLOB_ID_ARRAY)).andReturn(blobInfoList); replay(storage); - List result = Blob.get(storage, BLOB_ID_ARRAY); + List result = Blob.get(storage, BLOB_ID_ARRAY[0], BLOB_ID_ARRAY[1], BLOB_ID_ARRAY[2]); assertEquals(blobInfoList.size(), result.size()); for (int i = 0; i < blobInfoList.size(); i++) { if (blobInfoList.get(i) != null) { @@ -262,29 +298,65 @@ public void testDeleteNone() throws Exception { @Test public void testDeleteSome() throws Exception { - List deleleResultList = Arrays.asList(true, true, true); - expect(storage.delete(BLOB_ID_ARRAY)).andReturn(deleleResultList); + List deleteResult = Arrays.asList(true, true, true); + expect(storage.delete(BLOB_ID_ARRAY)).andReturn(deleteResult); replay(storage); List result = Blob.delete(storage, BLOB_ID_ARRAY); - assertEquals(deleleResultList.size(), result.size()); - for (int i = 0; i < deleleResultList.size(); i++) { - assertEquals(deleleResultList.get(i), result.get(i)); + assertEquals(deleteResult.size(), result.size()); + for (int i = 0; i < deleteResult.size(); i++) { + assertEquals(deleteResult.get(i), result.get(i)); } } @Test - public void testLoadFromString() throws Exception { - 
expect(storage.get(BLOB_INFO.blobId())).andReturn(BLOB_INFO); + public void testGetFromString() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(BLOB_INFO); + replay(storage); + Blob loadedBlob = Blob.get(storage, BLOB_INFO.bucket(), BLOB_INFO.name()); + assertEquals(BLOB_INFO, loadedBlob.info()); + } + + @Test + public void testGetFromId() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(BLOB_INFO); + replay(storage); + Blob loadedBlob = Blob.get(storage, BLOB_INFO.blobId()); + assertNotNull(loadedBlob); + assertEquals(BLOB_INFO, loadedBlob.info()); + } + + @Test + public void testGetFromStringNull() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null); + replay(storage); + assertNull(Blob.get(storage, BLOB_INFO.bucket(), BLOB_INFO.name())); + } + + @Test + public void testGetFromIdNull() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), new Storage.BlobGetOption[0])).andReturn(null); + replay(storage); + assertNull(Blob.get(storage, BLOB_INFO.blobId())); + } + + @Test + public void testGetFromStringWithOptions() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), Storage.BlobGetOption.generationMatch(42L))) + .andReturn(BLOB_INFO); replay(storage); - Blob loadedBlob = Blob.load(storage, BLOB_INFO.bucket(), BLOB_INFO.name()); + Blob loadedBlob = Blob.get(storage, BLOB_INFO.bucket(), BLOB_INFO.name(), + Storage.BlobGetOption.generationMatch(42L)); assertEquals(BLOB_INFO, loadedBlob.info()); } @Test - public void testLoadFromId() throws Exception { - expect(storage.get(BLOB_INFO.blobId())).andReturn(BLOB_INFO); + public void testGetFromIdWithOptions() throws Exception { + expect(storage.get(BLOB_INFO.blobId(), Storage.BlobGetOption.generationMatch(42L))) + .andReturn(BLOB_INFO); replay(storage); - Blob loadedBlob = Blob.load(storage, BLOB_INFO.blobId()); + Blob loadedBlob = + Blob.get(storage, BLOB_INFO.blobId(), Storage.BlobGetOption.generationMatch(42L)); + assertNotNull(loadedBlob); assertEquals(BLOB_INFO, loadedBlob.info()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java deleted file mode 100644 index ab3f7a000d90..000000000000 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelImplTest.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright 2015 Google Inc. All Rights Reserved. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
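(Aside, before the deleted legacy `BlobWriteChannelImplTest` continues below: its replacement, `BlobWriteChannelTest` further down, covers the chunked upload path. A hedged sketch of that path, with invented names; `Storage#writer(BlobInfo)` as the entry point is an assumption, since the tests construct the channel directly.)

```java
import com.google.gcloud.WriteChannel;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ChunkedWriteSketch {
  public static void main(String... args) throws IOException {
    Storage storage = StorageOptions.builder().projectId("my-project").build().service();
    BlobInfo info = BlobInfo.builder("my-bucket", "notes.txt").contentType("text/plain").build();
    WriteChannel writer = storage.writer(info); // assumption: Storage#writer(BlobInfo)
    writer.chunkSize(256 * 1024); // MIN_CHUNK_SIZE in the tests; writes of at least this
                                  // size are flushed to the resumable upload session
    writer.write(ByteBuffer.wrap("hello, storage".getBytes(StandardCharsets.UTF_8)));
    writer.close(); // flushes the remaining buffered bytes and finalizes the upload
  }
}
```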
- */ - -package com.google.gcloud.storage; - -import static org.easymock.EasyMock.verify; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import com.google.common.collect.ImmutableMap; -import com.google.gcloud.RestorableState; -import com.google.gcloud.RetryParams; -import com.google.gcloud.spi.StorageRpc; - -import org.easymock.Capture; -import org.easymock.CaptureType; -import org.easymock.EasyMock; -import org.junit.After; -import org.junit.Test; -import org.junit.Before; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Arrays; -import java.util.Map; -import java.util.Random; - -public class BlobWriteChannelImplTest { - - private static final String BUCKET_NAME = "b"; - private static final String BLOB_NAME = "n"; - private static final String UPLOAD_ID = "uploadid"; - private static final BlobInfo BLOB_INFO = BlobInfo.builder(BUCKET_NAME, BLOB_NAME).build(); - private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); - private static final int MIN_CHUNK_SIZE = 256 * 1024; - private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; - private static final int CUSTOM_CHUNK_SIZE = 4 * MIN_CHUNK_SIZE; - private static final Random RANDOM = new Random(); - - private StorageOptions optionsMock; - private StorageRpc storageRpcMock; - private BlobWriteChannelImpl writer; - - @Before - public void setUp() throws IOException, InterruptedException { - optionsMock = EasyMock.createMock(StorageOptions.class); - storageRpcMock = EasyMock.createMock(StorageRpc.class); - } - - @After - public void tearDown() throws Exception { - verify(optionsMock); - verify(storageRpcMock); - } - - @Test - public void testCreate() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - assertTrue(writer.isOpen()); - } - - @Test - public void testWriteWithoutFlush() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); - } - - @Test - public void testWriteWithFlush() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = Capture.newInstance(); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.eq(0L), EasyMock.eq(CUSTOM_CHUNK_SIZE), - EasyMock.eq(false)); - EasyMock.expectLastCall(); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - writer.chunkSize(CUSTOM_CHUNK_SIZE); - ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE); - assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); - 
assertArrayEquals(buffer.array(), capturedBuffer.getValue()); - } - - @Test - public void testWritesAndFlush() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = Capture.newInstance(); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.eq(0L), EasyMock.eq(DEFAULT_CHUNK_SIZE), - EasyMock.eq(false)); - EasyMock.expectLastCall(); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; - for (int i = 0; i < buffers.length; i++) { - buffers[i] = randomBuffer(MIN_CHUNK_SIZE); - assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); - } - for (int i = 0; i < buffers.length; i++) { - assertArrayEquals( - buffers[i].array(), - Arrays.copyOfRange( - capturedBuffer.getValue(), MIN_CHUNK_SIZE * i, MIN_CHUNK_SIZE * (i + 1))); - } - } - - @Test - public void testCloseWithoutFlush() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = Capture.newInstance(); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.eq(0L), EasyMock.eq(0), EasyMock.eq(true)); - EasyMock.expectLastCall(); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - assertTrue(writer.isOpen()); - writer.close(); - assertArrayEquals(new byte[0], capturedBuffer.getValue()); - assertTrue(!writer.isOpen()); - } - - @Test - public void testCloseWithFlush() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = Capture.newInstance(); - ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.eq(0L), EasyMock.eq(MIN_CHUNK_SIZE), - EasyMock.eq(true)); - EasyMock.expectLastCall(); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - assertTrue(writer.isOpen()); - writer.write(buffer); - writer.close(); - assertEquals(DEFAULT_CHUNK_SIZE, capturedBuffer.getValue().length); - assertArrayEquals(buffer.array(), Arrays.copyOf(capturedBuffer.getValue(), MIN_CHUNK_SIZE)); - assertTrue(!writer.isOpen()); - } - - @Test - public void testWriteClosed() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = 
Capture.newInstance(); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.eq(0L), EasyMock.eq(0), EasyMock.eq(true)); - EasyMock.expectLastCall(); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - writer.close(); - try { - writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); - fail("Expected BlobWriteChannel write to throw IOException"); - } catch (IOException ex) { - // expected - } - } - - @Test - public void testSaveAndRestore() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()).times(2); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = Capture.newInstance(CaptureType.ALL); - Capture capturedPosition = Capture.newInstance(CaptureType.ALL); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.captureLong(capturedPosition), - EasyMock.eq(DEFAULT_CHUNK_SIZE), EasyMock.eq(false)); - EasyMock.expectLastCall().times(2); - EasyMock.replay(storageRpcMock); - ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); - ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); - assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); - assertEquals(new Long(0L), capturedPosition.getValues().get(0)); - RestorableState writerState = writer.save(); - BlobWriteChannel restoredWriter = writerState.restore(); - assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2)); - assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1)); - assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1)); - } - - @Test - public void testSaveAndRestoreClosed() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); - Capture capturedBuffer = Capture.newInstance(); - storageRpcMock.write(EasyMock.eq(UPLOAD_ID), EasyMock.capture(capturedBuffer), EasyMock.eq(0), - EasyMock.eq(BLOB_INFO.toPb()), EasyMock.eq(0L), EasyMock.eq(0), EasyMock.eq(true)); - EasyMock.expectLastCall(); - EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - writer.close(); - RestorableState writerState = writer.save(); - RestorableState expectedWriterState = - BlobWriteChannelImpl.StateImpl.builder(optionsMock, BLOB_INFO, UPLOAD_ID) - .buffer(null) - .chunkSize(DEFAULT_CHUNK_SIZE) - .isOpen(false) - .position(0) - .build(); - BlobWriteChannel restoredWriter = writerState.restore(); - assertArrayEquals(new byte[0], capturedBuffer.getValue()); - assertEquals(expectedWriterState, restoredWriter.save()); - } - - @Test - public void testStateEquals() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.replay(optionsMock); - EasyMock.expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID) - .times(2); - 
EasyMock.replay(storageRpcMock); - writer = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - BlobWriteChannel writer2 = new BlobWriteChannelImpl(optionsMock, BLOB_INFO, EMPTY_RPC_OPTIONS); - RestorableState state = writer.save(); - RestorableState state2 = writer2.save(); - assertEquals(state, state2); - assertEquals(state.hashCode(), state2.hashCode()); - assertEquals(state.toString(), state2.toString()); - } - - private static ByteBuffer randomBuffer(int size) { - byte[] byteArray = new byte[size]; - RANDOM.nextBytes(byteArray); - return ByteBuffer.wrap(byteArray); - } -} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java new file mode 100644 index 000000000000..e499f6b9de52 --- /dev/null +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BlobWriteChannelTest.java @@ -0,0 +1,248 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.storage; + +import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.captureLong; +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.eq; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.expectLastCall; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.RestorableState; +import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.spi.StorageRpc; +import com.google.gcloud.spi.StorageRpcFactory; + +import org.easymock.Capture; +import org.easymock.CaptureType; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.Map; +import java.util.Random; + +public class BlobWriteChannelTest { + + private static final String BUCKET_NAME = "b"; + private static final String BLOB_NAME = "n"; + private static final String UPLOAD_ID = "uploadid"; + private static final BlobInfo BLOB_INFO = BlobInfo.builder(BUCKET_NAME, BLOB_NAME).build(); + private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); + private static final int MIN_CHUNK_SIZE = 256 * 1024; + private static final int DEFAULT_CHUNK_SIZE = 8 * MIN_CHUNK_SIZE; + private static final int CUSTOM_CHUNK_SIZE = 4 * MIN_CHUNK_SIZE; + private static final Random RANDOM = new Random(); + + private StorageOptions options; + private StorageRpcFactory rpcFactoryMock; + private StorageRpc storageRpcMock; + private BlobWriteChannel writer; + + @Before + public void 
setUp() { + rpcFactoryMock = createMock(StorageRpcFactory.class); + storageRpcMock = createMock(StorageRpc.class); + expect(rpcFactoryMock.create(anyObject(StorageOptions.class))) + .andReturn(storageRpcMock); + replay(rpcFactoryMock); + options = StorageOptions.builder() + .projectId("projectid") + .serviceRpcFactory(rpcFactoryMock) + .retryParams(RetryParams.noRetries()) + .build(); + } + + @After + public void tearDown() throws Exception { + verify(rpcFactoryMock, storageRpcMock); + } + + @Test + public void testCreate() { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + assertTrue(writer.isOpen()); + } + + @Test + public void testWriteWithoutFlush() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); + } + + @Test + public void testWriteWithFlush() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(CUSTOM_CHUNK_SIZE), eq(false)); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer.chunkSize(CUSTOM_CHUNK_SIZE); + ByteBuffer buffer = randomBuffer(CUSTOM_CHUNK_SIZE); + assertEquals(CUSTOM_CHUNK_SIZE, writer.write(buffer)); + assertArrayEquals(buffer.array(), capturedBuffer.getValue()); + } + + @Test + public void testWritesAndFlush() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), + eq(DEFAULT_CHUNK_SIZE), eq(false)); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + for (int i = 0; i < buffers.length; i++) { + assertArrayEquals( + buffers[i].array(), + Arrays.copyOfRange( + capturedBuffer.getValue(), MIN_CHUNK_SIZE * i, MIN_CHUNK_SIZE * (i + 1))); + } + } + + @Test + public void testCloseWithoutFlush() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + assertTrue(writer.isOpen()); + writer.close(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertTrue(!writer.isOpen()); + } + + @Test + public void testCloseWithFlush() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), + eq(true)); + replay(storageRpcMock); + writer = new 
BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + assertTrue(writer.isOpen()); + writer.write(buffer); + writer.close(); + assertEquals(DEFAULT_CHUNK_SIZE, capturedBuffer.getValue().length); + assertArrayEquals(buffer.array(), Arrays.copyOf(capturedBuffer.getValue(), MIN_CHUNK_SIZE)); + assertTrue(!writer.isOpen()); + } + + @Test + public void testWriteClosed() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer.close(); + try { + writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); + fail("Expected BlobWriteChannel write to throw IOException"); + } catch (IOException ex) { + // expected + } + } + + @Test + public void testSaveAndRestore() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(CaptureType.ALL); + Capture capturedPosition = Capture.newInstance(CaptureType.ALL); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), + captureLong(capturedPosition), eq(DEFAULT_CHUNK_SIZE), eq(false)); + expectLastCall().times(2); + replay(storageRpcMock); + ByteBuffer buffer1 = randomBuffer(DEFAULT_CHUNK_SIZE); + ByteBuffer buffer2 = randomBuffer(DEFAULT_CHUNK_SIZE); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + assertEquals(DEFAULT_CHUNK_SIZE, writer.write(buffer1)); + assertArrayEquals(buffer1.array(), capturedBuffer.getValues().get(0)); + assertEquals(new Long(0L), capturedPosition.getValues().get(0)); + RestorableState writerState = writer.capture(); + WriteChannel restoredWriter = writerState.restore(); + assertEquals(DEFAULT_CHUNK_SIZE, restoredWriter.write(buffer2)); + assertArrayEquals(buffer2.array(), capturedBuffer.getValues().get(1)); + assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getValues().get(1)); + } + + @Test + public void testSaveAndRestoreClosed() throws IOException { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID); + Capture capturedBuffer = Capture.newInstance(); + storageRpcMock.write(eq(UPLOAD_ID), capture(capturedBuffer), eq(0), eq(0L), eq(0), eq(true)); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + writer.close(); + RestorableState writerState = writer.capture(); + RestorableState expectedWriterState = + BlobWriteChannel.StateImpl.builder(options, BLOB_INFO, UPLOAD_ID) + .buffer(null) + .chunkSize(DEFAULT_CHUNK_SIZE) + .isOpen(false) + .position(0) + .build(); + WriteChannel restoredWriter = writerState.restore(); + assertArrayEquals(new byte[0], capturedBuffer.getValue()); + assertEquals(expectedWriterState, restoredWriter.capture()); + } + + @Test + public void testStateEquals() { + expect(storageRpcMock.open(BLOB_INFO.toPb(), EMPTY_RPC_OPTIONS)).andReturn(UPLOAD_ID).times(2); + replay(storageRpcMock); + writer = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + // avoid closing when you don't want partial writes to GCS upon failure + @SuppressWarnings("resource") + WriteChannel writer2 = new BlobWriteChannel(options, BLOB_INFO, EMPTY_RPC_OPTIONS); + RestorableState state = writer.capture(); + RestorableState state2 = writer2.capture(); + assertEquals(state, state2); + 
assertEquals(state.hashCode(), state2.hashCode()); + assertEquals(state.toString(), state2.toString()); + } + + private static ByteBuffer randomBuffer(int size) { + byte[] byteArray = new byte[size]; + RANDOM.nextBytes(byteArray); + return ByteBuffer.wrap(byteArray); + } +} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java index 09ba0e8cda8e..bd6bcdbbcff2 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketInfoTest.java @@ -18,7 +18,6 @@ import static com.google.gcloud.storage.Acl.Project.ProjectRole.VIEWERS; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import com.google.api.services.storage.model.Bucket.Lifecycle.Rule; @@ -31,10 +30,8 @@ import com.google.gcloud.storage.BucketInfo.DeleteRule; import com.google.gcloud.storage.BucketInfo.DeleteRule.Type; import com.google.gcloud.storage.BucketInfo.IsLiveDeleteRule; -import com.google.gcloud.storage.BucketInfo.Location; import com.google.gcloud.storage.BucketInfo.NumNewerVersionsDeleteRule; import com.google.gcloud.storage.BucketInfo.RawDeleteRule; -import com.google.gcloud.storage.BucketInfo.StorageClass; import org.junit.Test; @@ -44,8 +41,8 @@ public class BucketInfoTest { private static final List ACL = ImmutableList.of( - new Acl(User.ofAllAuthenticatedUsers(), Role.READER), - new Acl(new Project(VIEWERS, "p1"), Role.WRITER)); + Acl.of(User.ofAllAuthenticatedUsers(), Role.READER), + Acl.of(new Project(VIEWERS, "p1"), Role.WRITER)); private static final String ETAG = "0xFF00"; private static final String ID = "B/N:1"; private static final Long META_GENERATION = 10L; @@ -54,13 +51,13 @@ public class BucketInfoTest { private static final Long CREATE_TIME = System.currentTimeMillis(); private static final List CORS = Collections.singletonList(Cors.builder().build()); private static final List DEFAULT_ACL = - Collections.singletonList(new Acl(User.ofAllAuthenticatedUsers(), Role.WRITER)); + Collections.singletonList(Acl.of(User.ofAllAuthenticatedUsers(), Role.WRITER)); private static final List DELETE_RULES = Collections.singletonList(new AgeDeleteRule(5)); private static final String INDEX_PAGE = "index.html"; private static final String NOT_FOUND_PAGE = "error.html"; - private static final Location LOCATION = Location.asia(); - private static final StorageClass STORAGE_CLASS = StorageClass.standard(); + private static final String LOCATION = "ASIA"; + private static final String STORAGE_CLASS = "STANDARD"; private static final Boolean VERSIONING_ENABLED = true; private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b") .acl(ACL) @@ -93,7 +90,7 @@ public void testToBuilder() { @Test public void testToBuilderIncomplete() { BucketInfo incompleteBucketInfo = BucketInfo.builder("b").build(); - assertEquals(incompleteBucketInfo.name(), incompleteBucketInfo.toBuilder().build().name()); + compareBuckets(incompleteBucketInfo, incompleteBucketInfo.toBuilder().build()); } @Test @@ -149,16 +146,6 @@ private void compareBuckets(BucketInfo expected, BucketInfo value) { assertEquals(expected.versioningEnabled(), value.versioningEnabled()); } - @Test - public void testLocation() { - assertEquals("ASIA", Location.asia().value()); - assertEquals("EU", Location.eu().value()); - assertEquals("US", Location.us().value()); - 
assertSame(Location.asia(), Location.of("asia")); - assertSame(Location.eu(), Location.of("EU")); - assertSame(Location.us(), Location.of("uS")); - } - @Test public void testDeleteRules() { AgeDeleteRule ageRule = new AgeDeleteRule(10); diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java index af156cb932ee..4e253033c6f2 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/BucketTest.java @@ -23,28 +23,37 @@ import static org.easymock.EasyMock.verify; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import com.google.common.collect.ImmutableList; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; import com.google.gcloud.storage.BatchResponse.Result; + +import org.easymock.Capture; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Set; -import org.easymock.Capture; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; public class BucketTest { - private static final BucketInfo BUCKET_INFO = BucketInfo.of("b"); + private static final BucketInfo BUCKET_INFO = BucketInfo.builder("b").metageneration(42L).build(); private static final Iterable BLOB_INFO_RESULTS = ImmutableList.of( BlobInfo.builder("b", "n1").build(), BlobInfo.builder("b", "n2").build(), BlobInfo.builder("b", "n3").build()); + private static final String CONTENT_TYPE = "text/plain"; private Storage storage; private Bucket bucket; @@ -68,14 +77,16 @@ public void testInfo() throws Exception { @Test public void testExists_True() throws Exception { - expect(storage.get(BUCKET_INFO.name())).andReturn(BUCKET_INFO); + Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()}; + expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(BUCKET_INFO); replay(storage); assertTrue(bucket.exists()); } @Test public void testExists_False() throws Exception { - expect(storage.get(BUCKET_INFO.name())).andReturn(null); + Storage.BucketGetOption[] expectedOptions = {Storage.BucketGetOption.fields()}; + expect(storage.get(BUCKET_INFO.name(), expectedOptions)).andReturn(null); replay(storage); assertFalse(bucket.exists()); } @@ -86,7 +97,25 @@ public void testReload() throws Exception { expect(storage.get(updatedInfo.name())).andReturn(updatedInfo); replay(storage); Bucket updatedBucket = bucket.reload(); - assertSame(storage, bucket.storage()); + assertSame(storage, updatedBucket.storage()); + assertEquals(updatedInfo, updatedBucket.info()); + } + + @Test + public void testReloadNull() throws Exception { + expect(storage.get(BUCKET_INFO.name())).andReturn(null); + replay(storage); + assertNull(bucket.reload()); + } + + @Test + public void testReloadWithOptions() throws Exception { + BucketInfo updatedInfo = BUCKET_INFO.toBuilder().notFoundPage("p").build(); + expect(storage.get(updatedInfo.name(), Storage.BucketGetOption.metagenerationMatch(42L))) + .andReturn(updatedInfo); + replay(storage); + Bucket updatedBucket = 
bucket.reload(Bucket.BucketSourceOption.metagenerationMatch());
+    assertSame(storage, updatedBucket.storage());
+    assertEquals(updatedInfo, updatedBucket.info());
+  }
@@ -109,24 +138,28 @@ public void testDelete() throws Exception {
   @Test
   public void testList() throws Exception {
-    BaseListResult<BlobInfo> blobInfoResult = new BaseListResult<>(null, "c", BLOB_INFO_RESULTS);
-    expect(storage.list(BUCKET_INFO.name())).andReturn(blobInfoResult);
-    replay(storage);
-    ListResult<Blob> blobResult = bucket.list();
-    Iterator<BlobInfo> blobInfoIterator = blobInfoResult.iterator();
-    Iterator<Blob> blobIterator = blobResult.iterator();
+    StorageOptions storageOptions = createStrictMock(StorageOptions.class);
+    PageImpl<BlobInfo> blobInfoPage = new PageImpl<>(null, "c", BLOB_INFO_RESULTS);
+    expect(storage.list(BUCKET_INFO.name())).andReturn(blobInfoPage);
+    expect(storage.options()).andReturn(storageOptions);
+    expect(storageOptions.service()).andReturn(storage);
+    replay(storage, storageOptions);
+    Page<Blob> blobPage = bucket.list();
+    Iterator<BlobInfo> blobInfoIterator = blobInfoPage.values().iterator();
+    Iterator<Blob> blobIterator = blobPage.values().iterator();
     while (blobInfoIterator.hasNext() && blobIterator.hasNext()) {
       assertEquals(blobInfoIterator.next(), blobIterator.next().info());
     }
     assertFalse(blobInfoIterator.hasNext());
     assertFalse(blobIterator.hasNext());
-    assertEquals(blobInfoResult.nextPageCursor(), blobResult.nextPageCursor());
+    assertEquals(blobInfoPage.nextPageCursor(), blobPage.nextPageCursor());
+    verify(storageOptions);
   }
 
   @Test
   public void testGet() throws Exception {
     BlobInfo info = BlobInfo.builder("b", "n").build();
-    expect(storage.get(BlobId.of(bucket.info().name(), "n"), new Storage.BlobSourceOption[0]))
+    expect(storage.get(BlobId.of(bucket.info().name(), "n"), new Storage.BlobGetOption[0]))
         .andReturn(info);
     replay(storage);
     Blob blob = bucket.get("n");
@@ -140,9 +173,9 @@ public void testGetAll() throws Exception {
     for (BlobInfo info : BLOB_INFO_RESULTS) {
       batchResultList.add(new Result<>(info));
     }
-    BatchResponse response =
-        new BatchResponse(Collections.EMPTY_LIST, Collections.EMPTY_LIST, batchResultList);
-    expect(storage.apply(capture(capturedBatchRequest))).andReturn(response);
+    BatchResponse response = new BatchResponse(Collections.<Result<Boolean>>emptyList(),
+        Collections.<Result<BlobInfo>>emptyList(), batchResultList);
+    expect(storage.submit(capture(capturedBatchRequest))).andReturn(response);
     replay(storage);
     List<Blob> blobs = bucket.get("n1", "n2", "n3");
     Set<BlobId> blobInfoSet = capturedBatchRequest.getValue().toGet().keySet();
@@ -161,19 +194,70 @@ public void testGetAll() throws Exception {
   @Test
   public void testCreate() throws Exception {
-    BlobInfo info = BlobInfo.builder("b", "n").build();
+    BlobInfo info = BlobInfo.builder("b", "n").contentType(CONTENT_TYPE).build();
     byte[] content = {0xD, 0xE, 0xA, 0xD};
     expect(storage.create(info, content)).andReturn(info);
     replay(storage);
-    Blob blob = bucket.create("n", content);
+    Blob blob = bucket.create("n", content, CONTENT_TYPE);
     assertEquals(info, blob.info());
   }
 
   @Test
-  public void testLoad() throws Exception {
+  public void testCreateNullContentType() throws Exception {
+    BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build();
+    byte[] content = {0xD, 0xE, 0xA, 0xD};
+    expect(storage.create(info, content)).andReturn(info);
+    replay(storage);
+    Blob blob = bucket.create("n", content, null);
+    assertEquals(info, blob.info());
+  }
+
+  @Test
+  public void testCreateFromStream() throws Exception {
+    BlobInfo info = BlobInfo.builder("b",
"n").contentType(CONTENT_TYPE).build(); + byte[] content = {0xD, 0xE, 0xA, 0xD}; + InputStream streamContent = new ByteArrayInputStream(content); + expect(storage.create(info, streamContent)).andReturn(info); + replay(storage); + Blob blob = bucket.create("n", streamContent, CONTENT_TYPE); + assertEquals(info, blob.info()); + } + + @Test + public void testCreateFromStreamNullContentType() throws Exception { + BlobInfo info = BlobInfo.builder("b", "n").contentType(Storage.DEFAULT_CONTENT_TYPE).build(); + byte[] content = {0xD, 0xE, 0xA, 0xD}; + InputStream streamContent = new ByteArrayInputStream(content); + expect(storage.create(info, streamContent)).andReturn(info); + replay(storage); + Blob blob = bucket.create("n", streamContent, null); + assertEquals(info, blob.info()); + } + + @Test + public void testStaticGet() throws Exception { expect(storage.get(BUCKET_INFO.name())).andReturn(BUCKET_INFO); replay(storage); - Bucket loadedBucket = Bucket.load(storage, BUCKET_INFO.name()); + Bucket loadedBucket = Bucket.get(storage, BUCKET_INFO.name()); + assertNotNull(loadedBucket); + assertEquals(BUCKET_INFO, loadedBucket.info()); + } + + @Test + public void testStaticGetNull() throws Exception { + expect(storage.get(BUCKET_INFO.name())).andReturn(null); + replay(storage); + assertNull(Bucket.get(storage, BUCKET_INFO.name())); + } + + @Test + public void testStaticGetWithOptions() throws Exception { + expect(storage.get(BUCKET_INFO.name(), Storage.BucketGetOption.fields())) + .andReturn(BUCKET_INFO); + replay(storage); + Bucket loadedBucket = + Bucket.get(storage, BUCKET_INFO.name(), Storage.BucketGetOption.fields()); + assertNotNull(loadedBucket); assertEquals(BUCKET_INFO, loadedBucket.info()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java new file mode 100644 index 000000000000..b7e8d14e53a1 --- /dev/null +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyRequestTest.java @@ -0,0 +1,129 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.storage; + +import static com.google.gcloud.storage.Storage.PredefinedAcl.PUBLIC_READ; +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.storage.Storage.BlobSourceOption; +import com.google.gcloud.storage.Storage.BlobTargetOption; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class CopyRequestTest { + + private static final String SOURCE_BUCKET_NAME = "b0"; + private static final String SOURCE_BLOB_NAME = "o0"; + private static final String TARGET_BUCKET_NAME = "b1"; + private static final String TARGET_BLOB_NAME = "o1"; + private static final String TARGET_BLOB_CONTENT_TYPE = "contentType"; + private static final BlobId SOURCE_BLOB_ID = BlobId.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME); + private static final BlobId TARGET_BLOB_ID = BlobId.of(TARGET_BUCKET_NAME, TARGET_BLOB_NAME); + private static final BlobInfo TARGET_BLOB_INFO = BlobInfo.builder(TARGET_BLOB_ID) + .contentType(TARGET_BLOB_CONTENT_TYPE).build(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void testCopyRequest() { + Storage.CopyRequest copyRequest1 = Storage.CopyRequest.builder() + .source(SOURCE_BLOB_ID) + .sourceOptions(BlobSourceOption.generationMatch(1)) + .target(TARGET_BLOB_INFO, BlobTargetOption.predefinedAcl(PUBLIC_READ)) + .build(); + assertEquals(SOURCE_BLOB_ID, copyRequest1.source()); + assertEquals(1, copyRequest1.sourceOptions().size()); + assertEquals(BlobSourceOption.generationMatch(1), copyRequest1.sourceOptions().get(0)); + assertEquals(TARGET_BLOB_INFO, copyRequest1.target()); + assertEquals(1, copyRequest1.targetOptions().size()); + assertEquals(BlobTargetOption.predefinedAcl(PUBLIC_READ), copyRequest1.targetOptions().get(0)); + + Storage.CopyRequest copyRequest2 = Storage.CopyRequest.builder() + .source(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME) + .target(TARGET_BLOB_ID) + .build(); + assertEquals(SOURCE_BLOB_ID, copyRequest2.source()); + assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest2.target()); + + Storage.CopyRequest copyRequest3 = Storage.CopyRequest.builder() + .source(SOURCE_BLOB_ID) + .target(TARGET_BLOB_INFO, ImmutableList.of(BlobTargetOption.predefinedAcl(PUBLIC_READ))) + .build(); + assertEquals(SOURCE_BLOB_ID, copyRequest3.source()); + assertEquals(TARGET_BLOB_INFO, copyRequest3.target()); + assertEquals(ImmutableList.of(BlobTargetOption.predefinedAcl(PUBLIC_READ)), + copyRequest3.targetOptions()); + } + + @Test + public void testCopyRequestOf() { + Storage.CopyRequest copyRequest1 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_INFO); + assertEquals(SOURCE_BLOB_ID, copyRequest1.source()); + assertEquals(TARGET_BLOB_INFO, copyRequest1.target()); + + Storage.CopyRequest copyRequest2 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_NAME); + assertEquals(SOURCE_BLOB_ID, copyRequest2.source()); + assertEquals(BlobInfo.builder(SOURCE_BUCKET_NAME, TARGET_BLOB_NAME).build(), + copyRequest2.target()); + + Storage.CopyRequest copyRequest3 = + Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_INFO); + assertEquals(SOURCE_BLOB_ID, copyRequest3.source()); + assertEquals(TARGET_BLOB_INFO, copyRequest3.target()); + + Storage.CopyRequest copyRequest4 = + Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_NAME); + assertEquals(SOURCE_BLOB_ID, copyRequest4.source()); + assertEquals(BlobInfo.builder(SOURCE_BUCKET_NAME, 
TARGET_BLOB_NAME).build(), + copyRequest4.target()); + + Storage.CopyRequest copyRequest5 = Storage.CopyRequest.of(SOURCE_BLOB_ID, TARGET_BLOB_ID); + assertEquals(SOURCE_BLOB_ID, copyRequest5.source()); + assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest5.target()); + + Storage.CopyRequest copyRequest6 = + Storage.CopyRequest.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, TARGET_BLOB_ID); + assertEquals(SOURCE_BLOB_ID, copyRequest6.source()); + assertEquals(BlobInfo.builder(TARGET_BLOB_ID).build(), copyRequest6.target()); + } + + @Test + public void testCopyRequestFail() { + thrown.expect(IllegalArgumentException.class); + Storage.CopyRequest.builder() + .source(SOURCE_BLOB_ID) + .target(BlobInfo.builder(TARGET_BLOB_ID).build()) + .build(); + } + + @Test + public void testCopyRequestOfBlobInfoFail() { + thrown.expect(IllegalArgumentException.class); + Storage.CopyRequest.of(SOURCE_BLOB_ID, BlobInfo.builder(TARGET_BLOB_ID).build()); + } + + @Test + public void testCopyRequestOfStringFail() { + thrown.expect(IllegalArgumentException.class); + Storage.CopyRequest.of( + SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME, BlobInfo.builder(TARGET_BLOB_ID).build()); + } +} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java new file mode 100644 index 000000000000..1b1ffd987de6 --- /dev/null +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/CopyWriterTest.java @@ -0,0 +1,125 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.gcloud.storage;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.gcloud.RestorableState;
+import com.google.gcloud.RetryParams;
+import com.google.gcloud.spi.StorageRpc;
+import com.google.gcloud.spi.StorageRpc.RewriteRequest;
+import com.google.gcloud.spi.StorageRpc.RewriteResponse;
+import com.google.gcloud.spi.StorageRpcFactory;
+
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.util.Map;
+
+public class CopyWriterTest {
+
+  private static final String SOURCE_BUCKET_NAME = "b";
+  private static final String SOURCE_BLOB_NAME = "n";
+  private static final String DESTINATION_BUCKET_NAME = "b1";
+  private static final String DESTINATION_BLOB_NAME = "n1";
+  private static final BlobId BLOB_ID = BlobId.of(SOURCE_BUCKET_NAME, SOURCE_BLOB_NAME);
+  private static final BlobInfo BLOB_INFO =
+      BlobInfo.builder(DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).build();
+  private static final BlobInfo RESULT =
+      BlobInfo.builder(DESTINATION_BUCKET_NAME, DESTINATION_BLOB_NAME).contentType("type").build();
+  private static final Map<StorageRpc.Option, ?> EMPTY_OPTIONS = ImmutableMap.of();
+  private static final RewriteRequest REQUEST = new StorageRpc.RewriteRequest(BLOB_ID.toPb(),
+      EMPTY_OPTIONS, BLOB_INFO.toPb(), EMPTY_OPTIONS, null);
+  private static final RewriteResponse RESPONSE = new StorageRpc.RewriteResponse(REQUEST,
+      null, 42L, false, "token", 21L);
+  private static final RewriteResponse RESPONSE_DONE = new StorageRpc.RewriteResponse(REQUEST,
+      RESULT.toPb(), 42L, true, "token", 42L);
+
+  private StorageOptions options;
+  private StorageRpcFactory rpcFactoryMock;
+  private StorageRpc storageRpcMock;
+  private CopyWriter copyWriter;
+
+  @Before
+  public void setUp() {
+    rpcFactoryMock = createMock(StorageRpcFactory.class);
+    storageRpcMock = createMock(StorageRpc.class);
+    expect(rpcFactoryMock.create(anyObject(StorageOptions.class)))
+        .andReturn(storageRpcMock);
+    replay(rpcFactoryMock);
+    options = StorageOptions.builder()
+        .projectId("projectid")
+        .serviceRpcFactory(rpcFactoryMock)
+        .retryParams(RetryParams.noRetries())
+        .build();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    verify(rpcFactoryMock, storageRpcMock);
+  }
+
+  @Test
+  public void testRewrite() {
+    EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE);
+    EasyMock.replay(storageRpcMock);
+    copyWriter = new CopyWriter(options, RESPONSE);
+    assertEquals(RESULT, copyWriter.result());
+    assertTrue(copyWriter.isDone());
+    assertEquals(42L, copyWriter.totalBytesCopied());
+    assertEquals(42L, copyWriter.blobSize());
+  }
+
+  @Test
+  public void testRewriteMultipleRequests() {
+    EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE);
+    EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE);
+    EasyMock.replay(storageRpcMock);
+    copyWriter = new CopyWriter(options, RESPONSE);
+    assertEquals(RESULT, copyWriter.result());
+    assertTrue(copyWriter.isDone());
+    assertEquals(42L, copyWriter.totalBytesCopied());
+    assertEquals(42L, copyWriter.blobSize());
+  }
+
+  @Test
+  public void testSaveAndRestore() {
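The two rewrite tests above drive `CopyWriter` by stubbing `StorageRpc.continueRewrite`: the mocked `RewriteResponse` values describe a 42-byte blob copied 21 bytes per round trip, with the final response flipping the done flag. A minimal sketch of the resumable-copy pattern that `testSaveAndRestore` below exercises, assuming the `CopyWriter`/`RestorableState` API shown in this diff (`sourceId` and `targetInfo` are placeholder names):

```java
// Sketch only: copy a blob in resumable chunks, checkpointing after each round trip.
CopyWriter writer = storage.copy(Storage.CopyRequest.of(sourceId, targetInfo));
while (!writer.isDone()) {
  writer.copyChunk();                                    // one continueRewrite round trip
  RestorableState<CopyWriter> state = writer.capture();  // checkpoint; could be persisted
  writer = state.restore();                              // resume later, or on another worker
}
BlobInfo copied = writer.result();                       // info of the finished destination blob
```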
EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE); + EasyMock.expect(storageRpcMock.continueRewrite(RESPONSE)).andReturn(RESPONSE_DONE); + EasyMock.replay(storageRpcMock); + copyWriter = new CopyWriter(options, RESPONSE); + copyWriter.copyChunk(); + assertTrue(!copyWriter.isDone()); + assertEquals(21L, copyWriter.totalBytesCopied()); + assertEquals(42L, copyWriter.blobSize()); + RestorableState rewriterState = copyWriter.capture(); + CopyWriter restoredRewriter = rewriterState.restore(); + assertEquals(RESULT, restoredRewriter.result()); + assertTrue(restoredRewriter.isDone()); + assertEquals(42L, restoredRewriter.totalBytesCopied()); + assertEquals(42L, restoredRewriter.blobSize()); + } +} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java index 2747444d1f27..63b9d739b686 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/ITStorageTest.java @@ -19,136 +19,174 @@ import static java.nio.charset.StandardCharsets.UTF_8; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; +import com.google.api.client.util.Lists; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; +import com.google.gcloud.WriteChannel; +import com.google.gcloud.storage.Storage.BlobField; +import com.google.gcloud.storage.Storage.BucketField; import com.google.gcloud.storage.testing.RemoteGcsHelper; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; -import java.io.UnsupportedEncodingException; import java.net.URL; import java.net.URLConnection; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.Random; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; import java.util.logging.Level; import java.util.logging.Logger; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; - public class ITStorageTest { private static Storage storage; - private static RemoteGcsHelper gcsHelper; private static final Logger log = Logger.getLogger(ITStorageTest.class.getName()); - private static final String bucket = RemoteGcsHelper.generateBucketName(); + private static final String BUCKET = RemoteGcsHelper.generateBucketName(); private static final String CONTENT_TYPE = "text/plain"; private static final byte[] BLOB_BYTE_CONTENT = {0xD, 0xE, 0xA, 0xD}; private static final String BLOB_STRING_CONTENT = "Hello Google Cloud Storage!"; + private static final int MAX_BATCH_DELETES = 100; @BeforeClass public static void beforeClass() { - gcsHelper = RemoteGcsHelper.create(); - storage = StorageFactory.instance().get(gcsHelper.options()); - storage.create(BucketInfo.of(bucket)); + RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); + 
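A note on the test bootstrap here: the integration tests no longer go through the removed `StorageFactory`; the `Storage` service now comes straight off the helper's options. Roughly, under the setup this diff establishes (assuming, as an environment detail not shown here, that `RemoteGcsHelper.create()` picks up the project ID and credentials from the test environment):

```java
// Minimal sketch of the ITStorageTest bootstrap under the new helper API.
RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); // assumed to read project/credentials from the environment
Storage storage = gcsHelper.options().service();      // replaces StorageFactory.instance().get(...)
String bucket = RemoteGcsHelper.generateBucketName(); // generated name, so concurrent runs don't collide
storage.create(BucketInfo.of(bucket));                // scratch bucket; force-deleted in afterClass
```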
storage = gcsHelper.options().service(); + storage.create(BucketInfo.of(BUCKET)); } @AfterClass - public static void afterClass() - throws ExecutionException, TimeoutException, InterruptedException { - if (storage != null && !RemoteGcsHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS)) { + public static void afterClass() throws ExecutionException, InterruptedException { + if (storage != null && !RemoteGcsHelper.forceDelete(storage, BUCKET, 5, TimeUnit.SECONDS)) { if (log.isLoggable(Level.WARNING)) { - log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", bucket); + log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); } } } @Test(timeout = 5000) public void testListBuckets() throws InterruptedException { - Iterator bucketIterator = - storage.list(Storage.BucketListOption.prefix(bucket)).iterator(); + Iterator bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET), + Storage.BucketListOption.fields()).values().iterator(); while (!bucketIterator.hasNext()) { Thread.sleep(500); - bucketIterator = storage.list(Storage.BucketListOption.prefix(bucket)).iterator(); + bucketIterator = storage.list(Storage.BucketListOption.prefix(BUCKET), + Storage.BucketListOption.fields()).values().iterator(); } while (bucketIterator.hasNext()) { - assertTrue(bucketIterator.next().name().startsWith(bucket)); + BucketInfo remoteBucket = bucketIterator.next(); + assertTrue(remoteBucket.name().startsWith(BUCKET)); + assertNull(remoteBucket.createTime()); + assertNull(remoteBucket.selfLink()); } } + @Test + public void testGetBucketSelectedFields() { + BucketInfo remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields(BucketField.ID)); + assertEquals(BUCKET, remoteBucket.name()); + assertNull(remoteBucket.createTime()); + assertNotNull(remoteBucket.id()); + } + + @Test + public void testGetBucketAllSelectedFields() { + BucketInfo remoteBucket = storage.get(BUCKET, + Storage.BucketGetOption.fields(BucketField.values())); + assertEquals(BUCKET, remoteBucket.name()); + assertNotNull(remoteBucket.createTime()); + assertNotNull(remoteBucket.selfLink()); + } + + @Test + public void testGetBucketEmptyFields() { + BucketInfo remoteBucket = storage.get(BUCKET, Storage.BucketGetOption.fields()); + assertEquals(BUCKET, remoteBucket.name()); + assertNull(remoteBucket.createTime()); + assertNull(remoteBucket.selfLink()); + } + @Test public void testCreateBlob() { String blobName = "test-create-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); BlobInfo remoteBlob = storage.create(blob, BLOB_BYTE_CONTENT); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); - byte[] readBytes = storage.readAllBytes(bucket, blobName); + byte[] readBytes = storage.readAllBytes(BUCKET, blobName); assertArrayEquals(BLOB_BYTE_CONTENT, readBytes); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test public void testCreateEmptyBlob() { String blobName = "test-create-empty-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); BlobInfo remoteBlob = storage.create(blob); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); - byte[] readBytes = storage.readAllBytes(bucket, blobName); + byte[] readBytes = 
storage.readAllBytes(BUCKET, blobName); assertArrayEquals(new byte[0], readBytes); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test - public void testCreateBlobStream() throws UnsupportedEncodingException { + public void testCreateBlobStream() { String blobName = "test-create-blob-stream"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).contentType(CONTENT_TYPE).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build(); ByteArrayInputStream stream = new ByteArrayInputStream(BLOB_STRING_CONTENT.getBytes(UTF_8)); BlobInfo remoteBlob = storage.create(blob, stream); assertNotNull(remoteBlob); assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); assertEquals(blob.contentType(), remoteBlob.contentType()); - byte[] readBytes = storage.readAllBytes(bucket, blobName); + byte[] readBytes = storage.readAllBytes(BUCKET, blobName); assertEquals(BLOB_STRING_CONTENT, new String(readBytes, UTF_8)); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test public void testCreateBlobFail() { String blobName = "test-create-blob-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); + BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L).build(); try { - storage.create(blob.toBuilder().generation(-1L).build(), BLOB_BYTE_CONTENT, + storage.create(wrongGenerationBlob, BLOB_BYTE_CONTENT, Storage.BlobTargetOption.generationMatch()); fail("StorageException was expected"); } catch (StorageException ex) { // expected } - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test - public void testCreateBlobMd5Fail() throws UnsupportedEncodingException { + public void testCreateBlobMd5Fail() { String blobName = "test-create-blob-md5-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName) + BlobInfo blob = BlobInfo.builder(BUCKET, blobName) .contentType(CONTENT_TYPE) .md5("O1R4G1HJSDUISJjoIYmVhQ==") .build(); @@ -161,90 +199,284 @@ public void testCreateBlobMd5Fail() throws UnsupportedEncodingException { } } + @Test + public void testGetBlobEmptySelectedFields() { + String blobName = "test-get-empty-selected-fields-blob"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).contentType(CONTENT_TYPE).build(); + assertNotNull(storage.create(blob)); + BlobInfo remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields()); + assertEquals(blob.blobId(), remoteBlob.blobId()); + assertNull(remoteBlob.contentType()); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testGetBlobSelectedFields() { + String blobName = "test-get-selected-fields-blob"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + .contentType(CONTENT_TYPE) + .metadata(ImmutableMap.of("k", "v")) + .build(); + assertNotNull(storage.create(blob)); + BlobInfo remoteBlob = storage.get(blob.blobId(), Storage.BlobGetOption.fields( + BlobField.METADATA)); + assertEquals(blob.blobId(), remoteBlob.blobId()); + assertEquals(ImmutableMap.of("k", "v"), remoteBlob.metadata()); + assertNull(remoteBlob.contentType()); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testGetBlobAllSelectedFields() { + String blobName = "test-get-all-selected-fields-blob"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + .contentType(CONTENT_TYPE) + 
.metadata(ImmutableMap.of("k", "v")) + .build(); + assertNotNull(storage.create(blob)); + BlobInfo remoteBlob = storage.get(blob.blobId(), + Storage.BlobGetOption.fields(BlobField.values())); + assertEquals(blob.bucket(), remoteBlob.bucket()); + assertEquals(blob.name(), remoteBlob.name()); + assertEquals(ImmutableMap.of("k", "v"), remoteBlob.metadata()); + assertNotNull(remoteBlob.id()); + assertNotNull(remoteBlob.selfLink()); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testGetBlobFail() { + String blobName = "test-get-blob-fail"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + assertNotNull(storage.create(blob)); + BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName); + try { + storage.get(wrongGenerationBlob, Storage.BlobGetOption.generationMatch(-1)); + fail("StorageException was expected"); + } catch (StorageException ex) { + // expected + } + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testGetBlobFailNonExistingGeneration() { + String blobName = "test-get-blob-fail-non-existing-generation"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + assertNotNull(storage.create(blob)); + BlobId wrongGenerationBlob = BlobId.of(BUCKET, blobName, -1L); + assertNull(storage.get(wrongGenerationBlob)); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testListBlobsSelectedFields() { + String[] blobNames = {"test-list-blobs-selected-fields-blob1", + "test-list-blobs-selected-fields-blob2"}; + ImmutableMap metadata = ImmutableMap.of("k", "v"); + BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0]) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); + BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1]) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); + assertNotNull(storage.create(blob1)); + assertNotNull(storage.create(blob2)); + Page page = storage.list(BUCKET, + Storage.BlobListOption.prefix("test-list-blobs-selected-fields-blob"), + Storage.BlobListOption.fields(BlobField.METADATA)); + int index = 0; + for (BlobInfo remoteBlob : page.values()) { + assertEquals(BUCKET, remoteBlob.bucket()); + assertEquals(blobNames[index++], remoteBlob.name()); + assertEquals(metadata, remoteBlob.metadata()); + assertNull(remoteBlob.contentType()); + } + assertTrue(storage.delete(BUCKET, blobNames[0])); + assertTrue(storage.delete(BUCKET, blobNames[1])); + } + + @Test + public void testListBlobsEmptySelectedFields() { + String[] blobNames = {"test-list-blobs-empty-selected-fields-blob1", + "test-list-blobs-empty-selected-fields-blob2"}; + BlobInfo blob1 = BlobInfo.builder(BUCKET, blobNames[0]) + .contentType(CONTENT_TYPE) + .build(); + BlobInfo blob2 = BlobInfo.builder(BUCKET, blobNames[1]) + .contentType(CONTENT_TYPE) + .build(); + assertNotNull(storage.create(blob1)); + assertNotNull(storage.create(blob2)); + Page page = storage.list(BUCKET, + Storage.BlobListOption.prefix("test-list-blobs-empty-selected-fields-blob"), + Storage.BlobListOption.fields()); + int index = 0; + for (BlobInfo remoteBlob : page.values()) { + assertEquals(BUCKET, remoteBlob.bucket()); + assertEquals(blobNames[index++], remoteBlob.name()); + assertNull(remoteBlob.contentType()); + } + assertTrue(storage.delete(BUCKET, blobNames[0])); + assertTrue(storage.delete(BUCKET, blobNames[1])); + } + @Test public void testUpdateBlob() { String blobName = "test-update-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); 
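The selected-fields tests above pin down the partial-response behavior of the new `fields(...)` options: requesting specific `BlobField`s returns those fields plus whatever identifies the blob, an empty `fields()` call returns only the identifying fields, and `BlobField.values()` asks for everything. In sketch form, using the options this diff introduces (`bucketName` and `blobName` are placeholders):

```java
// Only metadata (plus bucket/name) is populated; contentType() reads back as null.
BlobInfo partial = storage.get(BlobId.of(bucketName, blobName),
    Storage.BlobGetOption.fields(Storage.BlobField.METADATA));

// Everything is populated, including server-set fields such as id() and selfLink().
BlobInfo full = storage.get(BlobId.of(bucketName, blobName),
    Storage.BlobGetOption.fields(Storage.BlobField.values()));
```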
assertNotNull(storage.create(blob)); BlobInfo updatedBlob = storage.update(blob.toBuilder().contentType(CONTENT_TYPE).build()); assertNotNull(updatedBlob); - assertEquals(blob.bucket(), updatedBlob.bucket()); assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); assertEquals(CONTENT_TYPE, updatedBlob.contentType()); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testUpdateBlobReplaceMetadata() { + String blobName = "test-update-blob-replace-metadata"; + ImmutableMap metadata = ImmutableMap.of("k1", "a"); + ImmutableMap newMetadata = ImmutableMap.of("k2", "b"); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); + assertNotNull(storage.create(blob)); + BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(null).build()); + assertNotNull(updatedBlob); + assertNull(updatedBlob.metadata()); + updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); + assertEquals(newMetadata, updatedBlob.metadata()); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testUpdateBlobMergeMetadata() { + String blobName = "test-update-blob-merge-metadata"; + ImmutableMap metadata = ImmutableMap.of("k1", "a"); + ImmutableMap newMetadata = ImmutableMap.of("k2", "b"); + ImmutableMap expectedMetadata = ImmutableMap.of("k1", "a", "k2", "b"); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); + assertNotNull(storage.create(blob)); + BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); + assertNotNull(updatedBlob); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); + assertEquals(expectedMetadata, updatedBlob.metadata()); + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testUpdateBlobUnsetMetadata() { + String blobName = "test-update-blob-unset-metadata"; + ImmutableMap metadata = ImmutableMap.of("k1", "a", "k2", "b"); + Map newMetadata = new HashMap<>(); + newMetadata.put("k1", "a"); + newMetadata.put("k2", null); + ImmutableMap expectedMetadata = ImmutableMap.of("k1", "a"); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); + assertNotNull(storage.create(blob)); + BlobInfo updatedBlob = storage.update(blob.toBuilder().metadata(newMetadata).build()); + assertNotNull(updatedBlob); + assertEquals(blob.name(), updatedBlob.name()); + assertEquals(blob.bucket(), updatedBlob.bucket()); + assertEquals(expectedMetadata, updatedBlob.metadata()); + assertTrue(storage.delete(BUCKET, blobName)); } @Test public void testUpdateBlobFail() { String blobName = "test-update-blob-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); + BlobInfo wrongGenerationBlob = BlobInfo.builder(BUCKET, blobName, -1L) + .contentType(CONTENT_TYPE) + .build(); try { - storage.update(blob.toBuilder().contentType(CONTENT_TYPE).generation(-1L).build(), - Storage.BlobTargetOption.generationMatch()); + storage.update(wrongGenerationBlob, Storage.BlobTargetOption.generationMatch()); fail("StorageException was expected"); } catch (StorageException ex) { // expected } - 
assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test public void testDeleteNonExistingBlob() { String blobName = "test-delete-non-existing-blob"; - assertTrue(!storage.delete(bucket, blobName)); + assertTrue(!storage.delete(BUCKET, blobName)); + } + + @Test + public void testDeleteBlobNonExistingGeneration() { + String blobName = "test-delete-blob-non-existing-generation"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + assertNotNull(storage.create(blob)); + assertTrue(!storage.delete(BlobId.of(BUCKET, blobName, -1L))); } @Test public void testDeleteBlobFail() { String blobName = "test-delete-blob-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); try { - storage.delete(bucket, blob.name(), Storage.BlobSourceOption.generationMatch(-1L)); + storage.delete(BUCKET, blob.name(), Storage.BlobSourceOption.generationMatch(-1L)); fail("StorageException was expected"); } catch (StorageException ex) { // expected } - assertTrue(storage.delete(bucket, blob.name())); + assertTrue(storage.delete(BUCKET, blob.name())); } @Test public void testComposeBlob() { String sourceBlobName1 = "test-compose-blob-source-1"; String sourceBlobName2 = "test-compose-blob-source-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build(); + BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); + BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1, BLOB_BYTE_CONTENT)); assertNotNull(storage.create(sourceBlob2, BLOB_BYTE_CONTENT)); String targetBlobName = "test-compose-blob-target"; - BlobInfo targetBlob = BlobInfo.builder(bucket, targetBlobName).build(); + BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build(); Storage.ComposeRequest req = Storage.ComposeRequest.of(ImmutableList.of(sourceBlobName1, sourceBlobName2), targetBlob); BlobInfo remoteBlob = storage.compose(req); assertNotNull(remoteBlob); - assertEquals(bucket, remoteBlob.bucket()); - assertEquals(targetBlobName, remoteBlob.name()); - byte[] readBytes = storage.readAllBytes(bucket, targetBlobName); + assertEquals(targetBlob.name(), remoteBlob.name()); + assertEquals(targetBlob.bucket(), remoteBlob.bucket()); + byte[] readBytes = storage.readAllBytes(BUCKET, targetBlobName); byte[] composedBytes = Arrays.copyOf(BLOB_BYTE_CONTENT, BLOB_BYTE_CONTENT.length * 2); System.arraycopy(BLOB_BYTE_CONTENT, 0, composedBytes, BLOB_BYTE_CONTENT.length, BLOB_BYTE_CONTENT.length); assertArrayEquals(composedBytes, readBytes); - assertTrue(storage.delete(bucket, sourceBlobName1)); - assertTrue(storage.delete(bucket, sourceBlobName2)); - assertTrue(storage.delete(bucket, targetBlobName)); + assertTrue(storage.delete(BUCKET, sourceBlobName1)); + assertTrue(storage.delete(BUCKET, sourceBlobName2)); + assertTrue(storage.delete(BUCKET, targetBlobName)); } @Test public void testComposeBlobFail() { String sourceBlobName1 = "test-compose-blob-fail-source-1"; String sourceBlobName2 = "test-compose-blob-fail-source-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build(); + BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); + BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); 
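`testComposeBlob` above establishes that compose concatenates its sources in request order: the target's bytes come back as `BLOB_BYTE_CONTENT` twice. The failing variant being set up here instead pins each source to a bogus generation via `addSource(name, -1L)`, so the server rejects the request. The happy-path call, as the test uses it:

```java
// Concatenate two existing blobs in BUCKET into a new target blob, in this order.
Storage.ComposeRequest request = Storage.ComposeRequest.of(
    ImmutableList.of(sourceBlobName1, sourceBlobName2),
    BlobInfo.builder(BUCKET, targetBlobName).build());
BlobInfo composed = storage.compose(request);
```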
assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); String targetBlobName = "test-compose-blob-fail-target"; - BlobInfo targetBlob = BlobInfo.builder(bucket, targetBlobName).build(); + BlobInfo targetBlob = BlobInfo.builder(BUCKET, targetBlobName).build(); Storage.ComposeRequest req = Storage.ComposeRequest.builder() .addSource(sourceBlobName1, -1L) .addSource(sourceBlobName2, -1L) @@ -256,55 +488,65 @@ public void testComposeBlobFail() { } catch (StorageException ex) { // expected } - assertTrue(storage.delete(bucket, sourceBlobName1)); - assertTrue(storage.delete(bucket, sourceBlobName2)); + assertTrue(storage.delete(BUCKET, sourceBlobName1)); + assertTrue(storage.delete(BUCKET, sourceBlobName2)); } @Test public void testCopyBlob() { String sourceBlobName = "test-copy-blob-source"; - BlobInfo blob = BlobInfo.builder(bucket, sourceBlobName).build(); + BlobId source = BlobId.of(BUCKET, sourceBlobName); + ImmutableMap metadata = ImmutableMap.of("k", "v"); + BlobInfo blob = BlobInfo.builder(source) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT)); String targetBlobName = "test-copy-blob-target"; - Storage.CopyRequest req = Storage.CopyRequest.of(blob.blobId(), targetBlobName); - BlobInfo remoteBlob = storage.copy(req); - assertNotNull(remoteBlob); - assertEquals(bucket, remoteBlob.bucket()); - assertEquals(targetBlobName, remoteBlob.name()); - byte[] readBytes = storage.readAllBytes(bucket, targetBlobName); - assertArrayEquals(BLOB_BYTE_CONTENT, readBytes); - assertTrue(storage.delete(bucket, sourceBlobName)); - assertTrue(storage.delete(bucket, targetBlobName)); + Storage.CopyRequest req = Storage.CopyRequest.of(source, BlobId.of(BUCKET, targetBlobName)); + CopyWriter copyWriter = storage.copy(req); + assertEquals(BUCKET, copyWriter.result().bucket()); + assertEquals(targetBlobName, copyWriter.result().name()); + assertEquals(CONTENT_TYPE, copyWriter.result().contentType()); + assertEquals(metadata, copyWriter.result().metadata()); + assertTrue(copyWriter.isDone()); + assertTrue(storage.delete(BUCKET, sourceBlobName)); + assertTrue(storage.delete(BUCKET, targetBlobName)); } @Test public void testCopyBlobUpdateMetadata() { String sourceBlobName = "test-copy-blob-update-metadata-source"; - BlobInfo sourceBlob = BlobInfo.builder(bucket, sourceBlobName).build(); - assertNotNull(storage.create(sourceBlob)); + BlobId source = BlobId.of(BUCKET, sourceBlobName); + assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT)); String targetBlobName = "test-copy-blob-update-metadata-target"; - BlobInfo targetBlob = - BlobInfo.builder(bucket, targetBlobName).contentType(CONTENT_TYPE).build(); - Storage.CopyRequest req = Storage.CopyRequest.of(bucket, sourceBlobName, targetBlob); - BlobInfo remoteBlob = storage.copy(req); - assertNotNull(remoteBlob); - assertEquals(bucket, remoteBlob.bucket()); - assertEquals(targetBlobName, remoteBlob.name()); - assertEquals(CONTENT_TYPE, remoteBlob.contentType()); - assertTrue(storage.delete(bucket, sourceBlobName)); - assertTrue(storage.delete(bucket, targetBlobName)); + ImmutableMap metadata = ImmutableMap.of("k", "v"); + BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName) + .contentType(CONTENT_TYPE) + .metadata(metadata) + .build(); + Storage.CopyRequest req = Storage.CopyRequest.of(source, target); + CopyWriter copyWriter = storage.copy(req); + assertEquals(BUCKET, copyWriter.result().bucket()); + assertEquals(targetBlobName, 
copyWriter.result().name()); + assertEquals(CONTENT_TYPE, copyWriter.result().contentType()); + assertEquals(metadata, copyWriter.result().metadata()); + assertTrue(copyWriter.isDone()); + assertTrue(storage.delete(BUCKET, sourceBlobName)); + assertTrue(storage.delete(BUCKET, targetBlobName)); } @Test public void testCopyBlobFail() { - String sourceBlobName = "test-copy-blob-fail-source"; - BlobInfo blob = BlobInfo.builder(bucket, sourceBlobName).build(); - assertNotNull(storage.create(blob)); - String targetBlobName = "test-copy-blob-fail-target"; - Storage.CopyRequest req = new Storage.CopyRequest.Builder() - .source(bucket, sourceBlobName) - .target(BlobInfo.builder(bucket, targetBlobName).build()) - .sourceOptions(Storage.BlobSourceOption.metagenerationMatch(-1L)) + String sourceBlobName = "test-copy-blob-source-fail"; + BlobId source = BlobId.of(BUCKET, sourceBlobName, -1L); + assertNotNull(storage.create(BlobInfo.builder(source).build(), BLOB_BYTE_CONTENT)); + String targetBlobName = "test-copy-blob-target-fail"; + BlobInfo target = BlobInfo.builder(BUCKET, targetBlobName).contentType(CONTENT_TYPE).build(); + Storage.CopyRequest req = Storage.CopyRequest.builder() + .source(BUCKET, sourceBlobName) + .sourceOptions(Storage.BlobSourceOption.generationMatch(-1L)) + .target(target) .build(); try { storage.copy(req); @@ -312,15 +554,26 @@ public void testCopyBlobFail() { } catch (StorageException ex) { // expected } - assertTrue(storage.delete(bucket, sourceBlobName)); + Storage.CopyRequest req2 = Storage.CopyRequest.builder() + .source(source) + .sourceOptions(Storage.BlobSourceOption.generationMatch()) + .target(target) + .build(); + try { + storage.copy(req2); + fail("StorageException was expected"); + } catch (StorageException ex) { + // expected + } + assertTrue(storage.delete(BUCKET, sourceBlobName)); } @Test public void testBatchRequest() { String sourceBlobName1 = "test-batch-request-blob-1"; String sourceBlobName2 = "test-batch-request-blob-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build(); + BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); + BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); assertNotNull(storage.create(sourceBlob1)); assertNotNull(storage.create(sourceBlob2)); @@ -331,25 +584,25 @@ public void testBatchRequest() { .update(updatedBlob1) .update(updatedBlob2) .build(); - BatchResponse updateResponse = storage.apply(updateRequest); + BatchResponse updateResponse = storage.submit(updateRequest); assertEquals(2, updateResponse.updates().size()); assertEquals(0, updateResponse.deletes().size()); assertEquals(0, updateResponse.gets().size()); BlobInfo remoteUpdatedBlob1 = updateResponse.updates().get(0).get(); BlobInfo remoteUpdatedBlob2 = updateResponse.updates().get(1).get(); - assertEquals(bucket, remoteUpdatedBlob1.bucket()); - assertEquals(bucket, remoteUpdatedBlob2.bucket()); - assertEquals(updatedBlob1.name(), remoteUpdatedBlob1.name()); - assertEquals(updatedBlob2.name(), remoteUpdatedBlob2.name()); + assertEquals(sourceBlob1.bucket(), remoteUpdatedBlob1.bucket()); + assertEquals(sourceBlob1.name(), remoteUpdatedBlob1.name()); + assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket()); + assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name()); assertEquals(updatedBlob1.contentType(), remoteUpdatedBlob1.contentType()); assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType()); // 
Batch get request BatchRequest getRequest = BatchRequest.builder() - .get(bucket, sourceBlobName1) - .get(bucket, sourceBlobName2) + .get(BUCKET, sourceBlobName1) + .get(BUCKET, sourceBlobName2) .build(); - BatchResponse getResponse = storage.apply(getRequest); + BatchResponse getResponse = storage.submit(getRequest); assertEquals(2, getResponse.gets().size()); assertEquals(0, getResponse.deletes().size()); assertEquals(0, getResponse.updates().size()); @@ -360,10 +613,10 @@ public void testBatchRequest() { // Batch delete request BatchRequest deleteRequest = BatchRequest.builder() - .delete(bucket, sourceBlobName1) - .delete(bucket, sourceBlobName2) + .delete(BUCKET, sourceBlobName1) + .delete(BUCKET, sourceBlobName2) .build(); - BatchResponse deleteResponse = storage.apply(deleteRequest); + BatchResponse deleteResponse = storage.submit(deleteRequest); assertEquals(2, deleteResponse.deletes().size()); assertEquals(0, deleteResponse.gets().size()); assertEquals(0, deleteResponse.updates().size()); @@ -371,40 +624,94 @@ public void testBatchRequest() { assertTrue(deleteResponse.deletes().get(1).get()); } + @Test + public void testBatchRequestManyDeletes() { + List blobsToDelete = Lists.newArrayListWithCapacity(2 * MAX_BATCH_DELETES); + for (int i = 0; i < 2 * MAX_BATCH_DELETES; i++) { + blobsToDelete.add(BlobId.of(BUCKET, "test-batch-request-many-deletes-blob-" + i)); + } + BatchRequest.Builder builder = BatchRequest.builder(); + for (BlobId blob : blobsToDelete) { + builder.delete(blob); + } + String sourceBlobName1 = "test-batch-request-many-deletes-source-blob-1"; + String sourceBlobName2 = "test-batch-request-many-deletes-source-blob-2"; + BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); + BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); + assertNotNull(storage.create(sourceBlob1)); + assertNotNull(storage.create(sourceBlob2)); + BlobInfo updatedBlob2 = sourceBlob2.toBuilder().contentType(CONTENT_TYPE).build(); + + BatchRequest updateRequest = builder + .get(BUCKET, sourceBlobName1) + .update(updatedBlob2) + .build(); + BatchResponse response = storage.submit(updateRequest); + assertEquals(2 * MAX_BATCH_DELETES, response.deletes().size()); + assertEquals(1, response.updates().size()); + assertEquals(1, response.gets().size()); + + // Check deletes + for (BatchResponse.Result deleteResult : response.deletes()) { + assertFalse(deleteResult.failed()); + assertFalse(deleteResult.get()); + } + + // Check updates + BlobInfo remoteUpdatedBlob2 = response.updates().get(0).get(); + assertEquals(sourceBlob2.bucket(), remoteUpdatedBlob2.bucket()); + assertEquals(sourceBlob2.name(), remoteUpdatedBlob2.name()); + assertEquals(updatedBlob2.contentType(), remoteUpdatedBlob2.contentType()); + + // Check gets + BlobInfo remoteBlob1 = response.gets().get(0).get(); + assertEquals(sourceBlob1.bucket(), remoteBlob1.bucket()); + assertEquals(sourceBlob1.name(), remoteBlob1.name()); + + assertTrue(storage.delete(BUCKET, sourceBlobName1)); + assertTrue(storage.delete(BUCKET, sourceBlobName2)); + } + @Test public void testBatchRequestFail() { String blobName = "test-batch-request-blob-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - BlobInfo updatedBlob = blob.toBuilder().generation(-1L).build(); + BlobInfo updatedBlob = BlobInfo.builder(BUCKET, blobName, -1L).build(); BatchRequest batchRequest = BatchRequest.builder() .update(updatedBlob, 
Storage.BlobTargetOption.generationMatch()) - .delete(bucket, blobName, Storage.BlobSourceOption.generationMatch(-1L)) - .get(bucket, blobName, Storage.BlobSourceOption.generationMatch(-1L)) + .delete(BUCKET, blobName, Storage.BlobSourceOption.generationMatch(-1L)) + .delete(BlobId.of(BUCKET, blobName, -1L)) + .get(BUCKET, blobName, Storage.BlobGetOption.generationMatch(-1L)) + .get(BlobId.of(BUCKET, blobName, -1L)) .build(); - BatchResponse updateResponse = storage.apply(batchRequest); - assertEquals(1, updateResponse.updates().size()); - assertEquals(1, updateResponse.deletes().size()); - assertEquals(1, updateResponse.gets().size()); - assertTrue(updateResponse.updates().get(0).failed()); - assertTrue(updateResponse.gets().get(0).failed()); - assertTrue(updateResponse.deletes().get(0).failed()); - assertTrue(storage.delete(bucket, blobName)); + BatchResponse batchResponse = storage.submit(batchRequest); + assertEquals(1, batchResponse.updates().size()); + assertEquals(2, batchResponse.deletes().size()); + assertEquals(2, batchResponse.gets().size()); + assertTrue(batchResponse.updates().get(0).failed()); + assertTrue(batchResponse.gets().get(0).failed()); + assertFalse(batchResponse.gets().get(1).failed()); + assertNull(batchResponse.gets().get(1).get()); + assertTrue(batchResponse.deletes().get(0).failed()); + assertFalse(batchResponse.deletes().get(1).failed()); + assertFalse(batchResponse.deletes().get(1).get()); + assertTrue(storage.delete(BUCKET, blobName)); } @Test - public void testReadAndWriteChannels() throws UnsupportedEncodingException, IOException { + public void testReadAndWriteChannels() throws IOException { String blobName = "test-read-and-write-channels-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); byte[] stringBytes; - try (BlobWriteChannel writer = storage.writer(blob)) { + try (WriteChannel writer = storage.writer(blob)) { stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT)); writer.write(ByteBuffer.wrap(stringBytes)); } ByteBuffer readBytes; ByteBuffer readStringBytes; - try (BlobReadChannel reader = storage.reader(blob.blobId())) { + try (ReadChannel reader = storage.reader(blob.blobId())) { readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length); readStringBytes = ByteBuffer.allocate(stringBytes.length); reader.read(readBytes); @@ -412,60 +719,109 @@ public void testReadAndWriteChannels() throws UnsupportedEncodingException, IOEx } assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array()); assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8)); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test - public void testReadAndWriteSaveChannels() throws UnsupportedEncodingException, IOException { - String blobName = "test-read-and-write-save-channels-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + public void testReadAndWriteCaptureChannels() throws IOException { + String blobName = "test-read-and-write-capture-channels-blob"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); byte[] stringBytes; - BlobWriteChannel writer = storage.writer(blob); + WriteChannel writer = storage.writer(blob); stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); writer.write(ByteBuffer.wrap(BLOB_BYTE_CONTENT)); - RestorableState writerState = writer.save(); - BlobWriteChannel secondWriter = writerState.restore(); + RestorableState writerState = 
writer.capture(); + WriteChannel secondWriter = writerState.restore(); secondWriter.write(ByteBuffer.wrap(stringBytes)); secondWriter.close(); ByteBuffer readBytes; ByteBuffer readStringBytes; - BlobReadChannel reader = storage.reader(blob.blobId()); + ReadChannel reader = storage.reader(blob.blobId()); reader.chunkSize(BLOB_BYTE_CONTENT.length); readBytes = ByteBuffer.allocate(BLOB_BYTE_CONTENT.length); reader.read(readBytes); - RestorableState readerState = reader.save(); - BlobReadChannel secondReader = readerState.restore(); + RestorableState readerState = reader.capture(); + ReadChannel secondReader = readerState.restore(); readStringBytes = ByteBuffer.allocate(stringBytes.length); secondReader.read(readStringBytes); reader.close(); secondReader.close(); assertArrayEquals(BLOB_BYTE_CONTENT, readBytes.array()); assertEquals(BLOB_STRING_CONTENT, new String(readStringBytes.array(), UTF_8)); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test - public void testReadChannelFail() throws UnsupportedEncodingException, IOException { + public void testReadChannelFail() throws IOException { String blobName = "test-read-channel-blob-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); - try (BlobReadChannel reader = + try (ReadChannel reader = storage.reader(blob.blobId(), Storage.BlobSourceOption.metagenerationMatch(-1L))) { reader.read(ByteBuffer.allocate(42)); fail("StorageException was expected"); } catch (StorageException ex) { // expected } - assertTrue(storage.delete(bucket, blobName)); + try (ReadChannel reader = + storage.reader(blob.blobId(), Storage.BlobSourceOption.generationMatch(-1L))) { + reader.read(ByteBuffer.allocate(42)); + fail("StorageException was expected"); + } catch (StorageException ex) { + // expected + } + BlobId blobIdWrongGeneration = BlobId.of(BUCKET, blobName, -1L); + try (ReadChannel reader = + storage.reader(blobIdWrongGeneration, Storage.BlobSourceOption.generationMatch())) { + reader.read(ByteBuffer.allocate(42)); + fail("StorageException was expected"); + } catch (StorageException ex) { + // expected + } + assertTrue(storage.delete(BUCKET, blobName)); } @Test - public void testWriteChannelFail() throws UnsupportedEncodingException, IOException { + public void testReadChannelFailUpdatedGeneration() throws IOException { + String blobName = "test-read-blob-fail-updated-generation"; + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); + Random random = new Random(); + int chunkSize = 1024; + int blobSize = 2 * chunkSize; + byte[] content = new byte[blobSize]; + random.nextBytes(content); + BlobInfo remoteBlob = storage.create(blob, content); + assertNotNull(remoteBlob); + assertEquals(blobSize, (long) remoteBlob.size()); + try (ReadChannel reader = storage.reader(blob.blobId())) { + reader.chunkSize(chunkSize); + ByteBuffer readBytes = ByteBuffer.allocate(chunkSize); + int numReadBytes = reader.read(readBytes); + assertEquals(chunkSize, numReadBytes); + assertArrayEquals(Arrays.copyOf(content, chunkSize), readBytes.array()); + try (WriteChannel writer = storage.writer(blob)) { + byte[] newContent = new byte[blobSize]; + random.nextBytes(newContent); + int numWrittenBytes = writer.write(ByteBuffer.wrap(newContent)); + assertEquals(blobSize, numWrittenBytes); + } + readBytes = ByteBuffer.allocate(chunkSize); + reader.read(readBytes); + fail("StorageException was expected"); + } catch 
(StorageException ex) { + StringBuilder messageBuilder = new StringBuilder(); + messageBuilder.append("Blob ").append(blob.blobId()).append(" was updated while reading"); + assertEquals(messageBuilder.toString(), ex.getMessage()); + } + assertTrue(storage.delete(BUCKET, blobName)); + } + + @Test + public void testWriteChannelFail() throws IOException { String blobName = "test-write-channel-blob-fail"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).generation(-1L).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName, -1L).build(); try { - try (BlobWriteChannel writer = - storage.writer(blob, Storage.BlobWriteOption.generationMatch())) { + try (WriteChannel writer = storage.writer(blob, Storage.BlobWriteOption.generationMatch())) { writer.write(ByteBuffer.allocate(42)); } fail("StorageException was expected"); @@ -475,23 +831,23 @@ public void testWriteChannelFail() throws UnsupportedEncodingException, IOExcept } @Test - public void testWriteChannelExistingBlob() throws UnsupportedEncodingException, IOException { + public void testWriteChannelExistingBlob() throws IOException { String blobName = "test-write-channel-existing-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); BlobInfo remoteBlob = storage.create(blob); byte[] stringBytes; - try (BlobWriteChannel writer = storage.writer(remoteBlob)) { + try (WriteChannel writer = storage.writer(remoteBlob)) { stringBytes = BLOB_STRING_CONTENT.getBytes(UTF_8); writer.write(ByteBuffer.wrap(stringBytes)); } assertArrayEquals(stringBytes, storage.readAllBytes(blob.blobId())); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test public void testGetSignedUrl() throws IOException { String blobName = "test-get-signed-url-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob, BLOB_BYTE_CONTENT)); URL url = storage.signUrl(blob, 1, TimeUnit.HOURS); URLConnection connection = url.openConnection(); @@ -499,64 +855,64 @@ public void testGetSignedUrl() throws IOException { try (InputStream responseStream = connection.getInputStream()) { assertEquals(BLOB_BYTE_CONTENT.length, responseStream.read(readBytes)); assertArrayEquals(BLOB_BYTE_CONTENT, readBytes); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } } @Test public void testPostSignedUrl() throws IOException { String blobName = "test-post-signed-url-blob"; - BlobInfo blob = BlobInfo.builder(bucket, blobName).build(); + BlobInfo blob = BlobInfo.builder(BUCKET, blobName).build(); assertNotNull(storage.create(blob)); URL url = storage.signUrl(blob, 1, TimeUnit.HOURS, Storage.SignUrlOption.httpMethod(HttpMethod.POST)); URLConnection connection = url.openConnection(); connection.setDoOutput(true); connection.connect(); - BlobInfo remoteBlob = storage.get(bucket, blobName); + BlobInfo remoteBlob = storage.get(BUCKET, blobName); assertNotNull(remoteBlob); - assertEquals(bucket, remoteBlob.bucket()); + assertEquals(blob.bucket(), remoteBlob.bucket()); assertEquals(blob.name(), remoteBlob.name()); - assertTrue(storage.delete(bucket, blobName)); + assertTrue(storage.delete(BUCKET, blobName)); } @Test public void testGetBlobs() { String sourceBlobName1 = "test-get-blobs-1"; String sourceBlobName2 = "test-get-blobs-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build(); - BlobInfo sourceBlob2 = 
BlobInfo.builder(bucket, sourceBlobName2).build();
+    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
+    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
     assertNotNull(storage.create(sourceBlob1));
     assertNotNull(storage.create(sourceBlob2));
-    List<BlobInfo> remoteInfos = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
-    assertEquals(sourceBlob1.bucket(), remoteInfos.get(0).bucket());
-    assertEquals(sourceBlob1.name(), remoteInfos.get(0).name());
-    assertEquals(sourceBlob2.bucket(), remoteInfos.get(1).bucket());
-    assertEquals(sourceBlob2.name(), remoteInfos.get(1).name());
-    assertTrue(storage.delete(bucket, sourceBlobName1));
-    assertTrue(storage.delete(bucket, sourceBlobName2));
+    List<BlobInfo> remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
+    assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket());
+    assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name());
+    assertEquals(sourceBlob2.bucket(), remoteBlobs.get(1).bucket());
+    assertEquals(sourceBlob2.name(), remoteBlobs.get(1).name());
+    assertTrue(storage.delete(BUCKET, sourceBlobName1));
+    assertTrue(storage.delete(BUCKET, sourceBlobName2));
   }

   @Test
   public void testGetBlobsFail() {
     String sourceBlobName1 = "test-get-blobs-fail-1";
     String sourceBlobName2 = "test-get-blobs-fail-2";
-    BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build();
-    BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build();
+    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
+    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
     assertNotNull(storage.create(sourceBlob1));
     List<BlobInfo> remoteBlobs = storage.get(sourceBlob1.blobId(), sourceBlob2.blobId());
     assertEquals(sourceBlob1.bucket(), remoteBlobs.get(0).bucket());
     assertEquals(sourceBlob1.name(), remoteBlobs.get(0).name());
     assertNull(remoteBlobs.get(1));
-    assertTrue(storage.delete(bucket, sourceBlobName1));
+    assertTrue(storage.delete(BUCKET, sourceBlobName1));
   }

   @Test
   public void testDeleteBlobs() {
     String sourceBlobName1 = "test-delete-blobs-1";
     String sourceBlobName2 = "test-delete-blobs-2";
-    BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build();
-    BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build();
+    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
+    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
     assertNotNull(storage.create(sourceBlob1));
     assertNotNull(storage.create(sourceBlob2));
     List<Boolean> deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId());
@@ -568,8 +924,8 @@ public void testDeleteBlobs() {
   public void testDeleteBlobsFail() {
     String sourceBlobName1 = "test-delete-blobs-fail-1";
     String sourceBlobName2 = "test-delete-blobs-fail-2";
-    BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build();
-    BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build();
+    BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build();
+    BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build();
     assertNotNull(storage.create(sourceBlob1));
     List<Boolean> deleteStatus = storage.delete(sourceBlob1.blobId(), sourceBlob2.blobId());
     assertTrue(deleteStatus.get(0));
@@ -580,8 +936,8 @@ public void testDeleteBlobsFail() {
   public void testUpdateBlobs() {
     String sourceBlobName1 = "test-update-blobs-1";
     String sourceBlobName2 = "test-update-blobs-2";
-    BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build();
-    BlobInfo
sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build(); + BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); + BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); BlobInfo remoteBlob1 = storage.create(sourceBlob1); BlobInfo remoteBlob2 = storage.create(sourceBlob2); assertNotNull(remoteBlob1); @@ -595,16 +951,16 @@ public void testUpdateBlobs() { assertEquals(sourceBlob2.bucket(), updatedBlobs.get(1).bucket()); assertEquals(sourceBlob2.name(), updatedBlobs.get(1).name()); assertEquals(CONTENT_TYPE, updatedBlobs.get(1).contentType()); - assertTrue(storage.delete(bucket, sourceBlobName1)); - assertTrue(storage.delete(bucket, sourceBlobName2)); + assertTrue(storage.delete(BUCKET, sourceBlobName1)); + assertTrue(storage.delete(BUCKET, sourceBlobName2)); } @Test public void testUpdateBlobsFail() { String sourceBlobName1 = "test-update-blobs-fail-1"; String sourceBlobName2 = "test-update-blobs-fail-2"; - BlobInfo sourceBlob1 = BlobInfo.builder(bucket, sourceBlobName1).build(); - BlobInfo sourceBlob2 = BlobInfo.builder(bucket, sourceBlobName2).build(); + BlobInfo sourceBlob1 = BlobInfo.builder(BUCKET, sourceBlobName1).build(); + BlobInfo sourceBlob2 = BlobInfo.builder(BUCKET, sourceBlobName2).build(); BlobInfo remoteBlob1 = storage.create(sourceBlob1); assertNotNull(remoteBlob1); List updatedBlobs = storage.update( @@ -614,6 +970,6 @@ public void testUpdateBlobsFail() { assertEquals(sourceBlob1.name(), updatedBlobs.get(0).name()); assertEquals(CONTENT_TYPE, updatedBlobs.get(0).contentType()); assertNull(updatedBlobs.get(1)); - assertTrue(storage.delete(bucket, sourceBlobName1)); + assertTrue(storage.delete(BUCKET, sourceBlobName1)); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java index 4665d04b2d82..2703ddb401c5 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/OptionTest.java @@ -31,7 +31,7 @@ public void testOption() { assertEquals("/", option.value()); } - @Test(expected=NullPointerException.class) + @Test(expected = NullPointerException.class) public void testIndexOutOfBoundsException() { new Option(null, "/"); } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java index 3cd67701e92b..d06f004fe84c 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/RemoteGcsHelperTest.java @@ -20,24 +20,21 @@ import static org.junit.Assert.assertTrue; import com.google.common.collect.ImmutableList; +import com.google.gcloud.Page; import com.google.gcloud.storage.testing.RemoteGcsHelper; +import org.easymock.EasyMock; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + import java.io.ByteArrayInputStream; import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Paths; import java.util.Iterator; import java.util.List; -import java.util.UUID; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import org.easymock.EasyMock; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - public class RemoteGcsHelperTest { private static final 
String BUCKET_NAME = "bucket-name"; @@ -72,41 +69,38 @@ public class RemoteGcsHelperTest { private static final List BLOB_LIST = ImmutableList.of( BlobInfo.builder(BUCKET_NAME, "n1").build(), BlobInfo.builder(BUCKET_NAME, "n2").build()); - private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, "", true); - private static final StorageException FATAL_EXCEPTION = new StorageException(500, "", false); - private static final ListResult BLOB_LIST_RESULT = new ListResult() { + private static final StorageException RETRYABLE_EXCEPTION = new StorageException(409, ""); + private static final StorageException FATAL_EXCEPTION = new StorageException(500, ""); + private static final Page BLOB_PAGE = new Page() { @Override public String nextPageCursor() { - return "listResult"; + return "nextPageCursor"; } @Override - public ListResult nextPage() { + public Page nextPage() { return null; } @Override - public Iterator iterator() { + public Iterable values() { + return BLOB_LIST; + } + + @Override + public Iterator iterateAll() { return BLOB_LIST.iterator(); } }; - private static String KEY_PATH = "/does/not/exist/key." + UUID.randomUUID().toString() + ".json"; @Rule public ExpectedException thrown = ExpectedException.none(); - @BeforeClass - public static void beforeClass() { - while (Files.exists(Paths.get(JSON_KEY))) { - KEY_PATH = "/does/not/exist/key." + UUID.randomUUID().toString() + ".json"; - } - } - @Test public void testForceDelete() throws InterruptedException, ExecutionException { Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_LIST_RESULT); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); for (BlobInfo info : BLOB_LIST) { EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); } @@ -119,7 +113,7 @@ public void testForceDelete() throws InterruptedException, ExecutionException { @Test public void testForceDeleteTimeout() throws InterruptedException, ExecutionException { Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_LIST_RESULT).anyTimes(); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE).anyTimes(); for (BlobInfo info : BLOB_LIST) { EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true).anyTimes(); } @@ -132,7 +126,7 @@ public void testForceDeleteTimeout() throws InterruptedException, ExecutionExcep @Test public void testForceDeleteFail() throws InterruptedException, ExecutionException { Storage storageMock = EasyMock.createMock(Storage.class); - EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_LIST_RESULT); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); for (BlobInfo info : BLOB_LIST) { EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); } @@ -146,6 +140,36 @@ public void testForceDeleteFail() throws InterruptedException, ExecutionExceptio } } + @Test + public void testForceDeleteNoTimeout() { + Storage storageMock = EasyMock.createMock(Storage.class); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); + for (BlobInfo info : BLOB_LIST) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); + } + EasyMock.expect(storageMock.delete(BUCKET_NAME)).andReturn(true); + EasyMock.replay(storageMock); + RemoteGcsHelper.forceDelete(storageMock, BUCKET_NAME); + EasyMock.verify(storageMock); + } + + @Test + public void 
testForceDeleteNoTimeoutFail() { + Storage storageMock = EasyMock.createMock(Storage.class); + EasyMock.expect(storageMock.list(BUCKET_NAME)).andReturn(BLOB_PAGE); + for (BlobInfo info : BLOB_LIST) { + EasyMock.expect(storageMock.delete(BUCKET_NAME, info.name())).andReturn(true); + } + EasyMock.expect(storageMock.delete(BUCKET_NAME)).andThrow(FATAL_EXCEPTION); + EasyMock.replay(storageMock); + thrown.expect(StorageException.class); + try { + RemoteGcsHelper.forceDelete(storageMock, BUCKET_NAME); + } finally { + EasyMock.verify(storageMock); + } + } + @Test public void testCreateFromStream() { RemoteGcsHelper helper = RemoteGcsHelper.create(PROJECT_ID, JSON_KEY_STREAM); @@ -153,17 +177,10 @@ public void testCreateFromStream() { assertEquals(PROJECT_ID, options.projectId()); assertEquals(60000, options.connectTimeout()); assertEquals(60000, options.readTimeout()); - assertEquals(10, options.retryParams().getRetryMaxAttempts()); - assertEquals(6, options.retryParams().getRetryMinAttempts()); - assertEquals(30000, options.retryParams().getMaxRetryDelayMillis()); - assertEquals(120000, options.retryParams().getTotalRetryPeriodMillis()); - assertEquals(250, options.retryParams().getInitialRetryDelayMillis()); - } - - @Test - public void testCreateNoKey() { - thrown.expect(RemoteGcsHelper.GcsHelperException.class); - thrown.expectMessage(KEY_PATH + " (No such file or directory)"); - RemoteGcsHelper.create(PROJECT_ID, KEY_PATH); + assertEquals(10, options.retryParams().retryMaxAttempts()); + assertEquals(6, options.retryParams().retryMinAttempts()); + assertEquals(30000, options.retryParams().maxRetryDelayMillis()); + assertEquals(120000, options.retryParams().totalRetryPeriodMillis()); + assertEquals(250, options.retryParams().initialRetryDelayMillis()); } } diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java index edda4ed17e25..8bef27cb0cd0 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/SerializationTest.java @@ -21,8 +21,11 @@ import com.google.common.collect.ImmutableMap; import com.google.gcloud.AuthCredentials; +import com.google.gcloud.PageImpl; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RestorableState; import com.google.gcloud.RetryParams; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Acl.Project.ProjectRole; @@ -44,6 +47,7 @@ public class SerializationTest { private static final Acl.Project ACL_PROJECT_ = new Acl.Project(ProjectRole.VIEWERS, "pid"); private static final Acl.User ACL_USER = new Acl.User("user"); private static final Acl.RawEntity ACL_RAW = new Acl.RawEntity("raw"); + private static final Acl ACL = Acl.of(ACL_DOMAIN, Acl.Role.OWNER); private static final BlobInfo BLOB_INFO = BlobInfo.builder("b", "n").build(); private static final BucketInfo BUCKET_INFO = BucketInfo.of("b"); private static final Cors.Origin ORIGIN = Cors.Origin.any(); @@ -54,8 +58,8 @@ public class SerializationTest { Collections.singletonList(BatchResponse.Result.of(true)), Collections.>emptyList(), Collections.>emptyList()); - private static final BaseListResult LIST_RESULT = - new BaseListResult<>(null, "c", Collections.singletonList(BlobInfo.builder("b", "n").build())); + private static final PageImpl PAGE_RESULT = new PageImpl<>( + null, "c", 
Collections.singletonList(BlobInfo.builder("b", "n").build())); private static final Storage.BlobListOption BLOB_LIST_OPTIONS = Storage.BlobListOption.maxResults(100); private static final Storage.BlobSourceOption BLOB_SOURCE_OPTIONS = @@ -81,8 +85,8 @@ public void testServiceOptions() throws Exception { options = options.toBuilder() .projectId("p2") - .retryParams(RetryParams.getDefaultInstance()) - .authCredentials(AuthCredentials.noCredentials()) + .retryParams(RetryParams.defaultInstance()) + .authCredentials(null) .pathDelimiter(":") .build(); serializedCopy = serializeAndDeserialize(options); @@ -91,11 +95,10 @@ public void testServiceOptions() throws Exception { @Test public void testModelAndRequests() throws Exception { - Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, BLOB_INFO, - BUCKET_INFO, - ORIGIN, CORS, BATCH_REQUEST, BATCH_RESPONSE, LIST_RESULT, BLOB_LIST_OPTIONS, - BLOB_SOURCE_OPTIONS, BLOB_TARGET_OPTIONS, BUCKET_LIST_OPTIONS, BUCKET_SOURCE_OPTIONS, - BUCKET_TARGET_OPTIONS}; + Serializable[] objects = {ACL_DOMAIN, ACL_GROUP, ACL_PROJECT_, ACL_USER, ACL_RAW, ACL, + BLOB_INFO, BUCKET_INFO, ORIGIN, CORS, BATCH_REQUEST, BATCH_RESPONSE, PAGE_RESULT, + BLOB_LIST_OPTIONS, BLOB_SOURCE_OPTIONS, BLOB_TARGET_OPTIONS, BUCKET_LIST_OPTIONS, + BUCKET_SOURCE_OPTIONS, BUCKET_TARGET_OPTIONS}; for (Serializable obj : objects) { Object copy = serializeAndDeserialize(obj); assertEquals(obj, obj); @@ -109,29 +112,30 @@ public void testModelAndRequests() throws Exception { public void testReadChannelState() throws IOException, ClassNotFoundException { StorageOptions options = StorageOptions.builder() .projectId("p2") - .retryParams(RetryParams.getDefaultInstance()) - .authCredentials(AuthCredentials.noCredentials()) + .retryParams(RetryParams.defaultInstance()) .build(); - BlobReadChannel reader = - new BlobReadChannelImpl(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS); - RestorableState state = reader.save(); - RestorableState deserializedState = serializeAndDeserialize(state); + ReadChannel reader = + new BlobReadChannel(options, BlobId.of("b", "n"), EMPTY_RPC_OPTIONS); + RestorableState state = reader.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); assertEquals(state, deserializedState); assertEquals(state.hashCode(), deserializedState.hashCode()); assertEquals(state.toString(), deserializedState.toString()); + reader.close(); } @Test public void testWriteChannelState() throws IOException, ClassNotFoundException { StorageOptions options = StorageOptions.builder() .projectId("p2") - .retryParams(RetryParams.getDefaultInstance()) - .authCredentials(AuthCredentials.noCredentials()) + .retryParams(RetryParams.defaultInstance()) .build(); - BlobWriteChannelImpl writer = new BlobWriteChannelImpl( - options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id"); - RestorableState state = writer.save(); - RestorableState deserializedState = serializeAndDeserialize(state); + // avoid closing when you don't want partial writes to GCS upon failure + @SuppressWarnings("resource") + BlobWriteChannel writer = + new BlobWriteChannel(options, BlobInfo.builder(BlobId.of("b", "n")).build(), "upload-id"); + RestorableState state = writer.capture(); + RestorableState deserializedState = serializeAndDeserialize(state); assertEquals(state, deserializedState); assertEquals(state.hashCode(), deserializedState.hashCode()); assertEquals(state.toString(), deserializedState.toString()); diff --git 
a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java new file mode 100644 index 000000000000..cf1d4b394e57 --- /dev/null +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageExceptionTest.java @@ -0,0 +1,125 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.storage; + +import static org.easymock.EasyMock.createMock; +import static org.easymock.EasyMock.expect; +import static org.easymock.EasyMock.replay; +import static org.easymock.EasyMock.verify; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; + +import org.junit.Test; + +import java.io.IOException; +import java.net.SocketTimeoutException; + +public class StorageExceptionTest { + + @Test + public void testStorageException() { + StorageException exception = new StorageException(500, "message"); + assertEquals(500, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(502, "message"); + assertEquals(502, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(503, "message"); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(504, "message"); + assertEquals(504, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(429, "message"); + assertEquals(429, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(408, "message"); + assertEquals(408, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + + exception = new StorageException(400, "message"); + assertEquals(400, exception.code()); + assertEquals("message", exception.getMessage()); + assertNull(exception.reason()); + assertFalse(exception.retryable()); + assertTrue(exception.idempotent()); + + IOException cause = 
new SocketTimeoutException(); + exception = new StorageException(cause); + assertNull(exception.reason()); + assertNull(exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + assertEquals(cause, exception.getCause()); + + GoogleJsonError error = new GoogleJsonError(); + error.setCode(503); + error.setMessage("message"); + exception = new StorageException(error); + assertEquals(503, exception.code()); + assertEquals("message", exception.getMessage()); + assertTrue(exception.retryable()); + assertTrue(exception.idempotent()); + } + + @Test + public void testTranslateAndThrow() throws Exception { + StorageException cause = new StorageException(503, "message"); + RetryHelperException exceptionMock = createMock(RetryHelperException.class); + expect(exceptionMock.getCause()).andReturn(cause).times(2); + replay(exceptionMock); + try { + StorageException.translateAndThrow(exceptionMock); + } catch (BaseServiceException ex) { + assertEquals(503, ex.code()); + assertEquals("message", ex.getMessage()); + assertTrue(ex.retryable()); + assertTrue(ex.idempotent()); + } finally { + verify(exceptionMock); + } + } +} diff --git a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java index b3a6fe36859e..f32a51507857 100644 --- a/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java +++ b/gcloud-java-storage/src/test/java/com/google/gcloud/storage/StorageImplTest.java @@ -17,8 +17,8 @@ package com.google.gcloud.storage; import static java.nio.charset.StandardCharsets.UTF_8; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; @@ -32,20 +32,24 @@ import com.google.common.collect.Maps; import com.google.common.io.BaseEncoding; import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials; +import com.google.gcloud.Page; +import com.google.gcloud.ReadChannel; import com.google.gcloud.RetryParams; import com.google.gcloud.ServiceOptions; +import com.google.gcloud.WriteChannel; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.spi.StorageRpc.Tuple; +import com.google.gcloud.spi.StorageRpcFactory; +import com.google.gcloud.storage.Storage.CopyRequest; import org.easymock.Capture; import org.easymock.EasyMock; - import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Rule; -import org.junit.rules.ExpectedException; import org.junit.Test; +import org.junit.rules.ExpectedException; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -86,8 +90,8 @@ public class StorageImplTest { private static final BucketInfo BUCKET_INFO2 = BucketInfo.builder(BUCKET_NAME2).build(); // BlobInfo objects - private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1) - .metageneration(42L).generation(24L).contentType("application/json").md5("md5string").build(); + private static final BlobInfo BLOB_INFO1 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME1, 24L) + .metageneration(42L).contentType("application/json").md5("md5string").build(); private static final BlobInfo BLOB_INFO2 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME2).build(); private static final BlobInfo BLOB_INFO3 = BlobInfo.builder(BUCKET_NAME1, BLOB_NAME3).build(); @@ -135,17 
+139,40 @@ public class StorageImplTest { private static final Storage.BlobWriteOption BLOB_WRITE_CRC2C = Storage.BlobWriteOption.crc32cMatch(); - // Bucket source options + // Bucket get/source options private static final Storage.BucketSourceOption BUCKET_SOURCE_METAGENERATION = Storage.BucketSourceOption.metagenerationMatch(BUCKET_INFO1.metageneration()); private static final Map BUCKET_SOURCE_OPTIONS = ImmutableMap.of( StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_SOURCE_METAGENERATION.value()); + private static final Storage.BucketGetOption BUCKET_GET_METAGENERATION = + Storage.BucketGetOption.metagenerationMatch(BUCKET_INFO1.metageneration()); + private static final Storage.BucketGetOption BUCKET_GET_FIELDS = + Storage.BucketGetOption.fields(Storage.BucketField.LOCATION, Storage.BucketField.ACL); + private static final Storage.BucketGetOption BUCKET_GET_EMPTY_FIELDS = + Storage.BucketGetOption.fields(); + private static final Map BUCKET_GET_OPTIONS = ImmutableMap.of( + StorageRpc.Option.IF_METAGENERATION_MATCH, BUCKET_SOURCE_METAGENERATION.value()); - // Blob source options + // Blob get/source options + private static final Storage.BlobGetOption BLOB_GET_METAGENERATION = + Storage.BlobGetOption.metagenerationMatch(BLOB_INFO1.metageneration()); + private static final Storage.BlobGetOption BLOB_GET_GENERATION = + Storage.BlobGetOption.generationMatch(BLOB_INFO1.generation()); + private static final Storage.BlobGetOption BLOB_GET_GENERATION_FROM_BLOB_ID = + Storage.BlobGetOption.generationMatch(); + private static final Storage.BlobGetOption BLOB_GET_FIELDS = + Storage.BlobGetOption.fields(Storage.BlobField.CONTENT_TYPE, Storage.BlobField.CRC32C); + private static final Storage.BlobGetOption BLOB_GET_EMPTY_FIELDS = + Storage.BlobGetOption.fields(); + private static final Map BLOB_GET_OPTIONS = ImmutableMap.of( + StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_GET_METAGENERATION.value(), + StorageRpc.Option.IF_GENERATION_MATCH, BLOB_GET_GENERATION.value()); private static final Storage.BlobSourceOption BLOB_SOURCE_METAGENERATION = Storage.BlobSourceOption.metagenerationMatch(BLOB_INFO1.metageneration()); private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION = Storage.BlobSourceOption.generationMatch(BLOB_INFO1.generation()); + private static final Storage.BlobSourceOption BLOB_SOURCE_GENERATION_FROM_BLOB_ID = + Storage.BlobSourceOption.generationMatch(); private static final Map BLOB_SOURCE_OPTIONS = ImmutableMap.of( StorageRpc.Option.IF_METAGENERATION_MATCH, BLOB_SOURCE_METAGENERATION.value(), StorageRpc.Option.IF_GENERATION_MATCH, BLOB_SOURCE_GENERATION.value()); @@ -158,6 +185,10 @@ public class StorageImplTest { Storage.BucketListOption.maxResults(42L); private static final Storage.BucketListOption BUCKET_LIST_PREFIX = Storage.BucketListOption.prefix("prefix"); + private static final Storage.BucketListOption BUCKET_LIST_FIELDS = + Storage.BucketListOption.fields(Storage.BucketField.LOCATION, Storage.BucketField.ACL); + private static final Storage.BucketListOption BUCKET_LIST_EMPTY_FIELDS = + Storage.BucketListOption.fields(); private static final Map BUCKET_LIST_OPTIONS = ImmutableMap.of( StorageRpc.Option.MAX_RESULTS, BUCKET_LIST_MAX_RESULT.value(), StorageRpc.Option.PREFIX, BUCKET_LIST_PREFIX.value()); @@ -167,6 +198,10 @@ public class StorageImplTest { Storage.BlobListOption.maxResults(42L); private static final Storage.BlobListOption BLOB_LIST_PREFIX = Storage.BlobListOption.prefix("prefix"); + private static final Storage.BlobListOption BLOB_LIST_FIELDS = + 
Storage.BlobListOption.fields(Storage.BlobField.CONTENT_TYPE, Storage.BlobField.MD5HASH); + private static final Storage.BlobListOption BLOB_LIST_EMPTY_FIELDS = + Storage.BlobListOption.fields(); private static final Map BLOB_LIST_OPTIONS = ImmutableMap.of( StorageRpc.Option.MAX_RESULTS, BLOB_LIST_MAX_RESULT.value(), StorageRpc.Option.PREFIX, BLOB_LIST_PREFIX.value()); @@ -194,10 +229,12 @@ public long millis() { } }; + private static final String ACCOUNT = "account"; private static PrivateKey privateKey; private static PublicKey publicKey; - private StorageOptions optionsMock; + private StorageOptions options; + private StorageRpcFactory rpcFactoryMock; private StorageRpc storageRpcMock; private Storage storage; @@ -216,44 +253,48 @@ public static void beforeClass() throws NoSuchAlgorithmException, InvalidKeySpec } @Before - public void setUp() throws IOException, InterruptedException { - optionsMock = EasyMock.createMock(StorageOptions.class); + public void setUp() { + rpcFactoryMock = EasyMock.createMock(StorageRpcFactory.class); storageRpcMock = EasyMock.createMock(StorageRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(StorageOptions.class))) + .andReturn(storageRpcMock); + EasyMock.replay(rpcFactoryMock); + options = StorageOptions.builder() + .projectId("projectId") + .clock(TIME_SOURCE) + .serviceRpcFactory(rpcFactoryMock) + .retryParams(RetryParams.noRetries()) + .build(); } @After public void tearDown() throws Exception { - EasyMock.verify(optionsMock, storageRpcMock); + EasyMock.verify(rpcFactoryMock, storageRpcMock); } @Test public void testGetOptions() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); - assertSame(optionsMock, storage.options()); + EasyMock.replay(storageRpcMock); + storage = options.service(); + assertSame(options, storage.options()); } @Test public void testCreateBucket() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.create(BUCKET_INFO1.toPb(), EMPTY_RPC_OPTIONS)) .andReturn(BUCKET_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BucketInfo bucket = storage.create(BUCKET_INFO1); assertEquals(BUCKET_INFO1.toPb(), bucket.toPb()); } @Test public void testCreateBucketWithOptions() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.create(BUCKET_INFO1.toPb(), BUCKET_TARGET_OPTIONS)) .andReturn(BUCKET_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BucketInfo bucket = storage.create(BUCKET_INFO1, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL); assertEquals(BUCKET_INFO1, bucket); @@ -261,16 +302,14 @@ public void testCreateBucketWithOptions() { @Test public void testCreateBlob() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); Capture capturedStream = Capture.newInstance(); EasyMock.expect(storageRpcMock.create( 
EasyMock.eq(BLOB_INFO1.toBuilder().md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build().toPb()), EasyMock.capture(capturedStream), EasyMock.eq(EMPTY_RPC_OPTIONS))) .andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.create(BLOB_INFO1, BLOB_CONTENT); assertEquals(BLOB_INFO1, blob); ByteArrayInputStream byteStream = capturedStream.getValue(); @@ -282,8 +321,6 @@ public void testCreateBlob() throws IOException { @Test public void testCreateEmptyBlob() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); Capture capturedStream = Capture.newInstance(); EasyMock.expect(storageRpcMock.create( EasyMock.eq(BLOB_INFO1.toBuilder() @@ -294,8 +331,8 @@ public void testCreateEmptyBlob() throws IOException { EasyMock.capture(capturedStream), EasyMock.eq(EMPTY_RPC_OPTIONS))) .andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.create(BLOB_INFO1); assertEquals(BLOB_INFO1, blob); ByteArrayInputStream byteStream = capturedStream.getValue(); @@ -305,8 +342,6 @@ public void testCreateEmptyBlob() throws IOException { @Test public void testCreateBlobWithOptions() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); Capture capturedStream = Capture.newInstance(); EasyMock.expect(storageRpcMock.create( EasyMock.eq(BLOB_INFO1.toBuilder() @@ -317,8 +352,8 @@ public void testCreateBlobWithOptions() throws IOException { EasyMock.capture(capturedStream), EasyMock.eq(BLOB_TARGET_OPTIONS_CREATE))) .andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.create(BLOB_INFO1, BLOB_CONTENT, BLOB_TARGET_METAGENERATION, BLOB_TARGET_NOT_EXIST, BLOB_TARGET_PREDEFINED_ACL); @@ -331,177 +366,335 @@ public void testCreateBlobWithOptions() throws IOException { } @Test - public void testCreateBlobFromStream() throws IOException { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); + public void testCreateBlobFromStream() { ByteArrayInputStream fileStream = new ByteArrayInputStream(BLOB_CONTENT); BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder(); BlobInfo infoWithHashes = infoBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build(); BlobInfo infoWithoutHashes = infoBuilder.md5(null).crc32c(null).build(); EasyMock.expect(storageRpcMock.create(infoWithoutHashes.toPb(), fileStream, EMPTY_RPC_OPTIONS)) .andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.create(infoWithHashes, fileStream); assertEquals(BLOB_INFO1, blob); } @Test public void testGetBucket() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); 
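// The two removals just above recur throughout this file: the per-test optionsMock
// stubs are gone because setUp() (earlier in this hunk) now builds real StorageOptions
// around a mocked StorageRpcFactory. A minimal sketch of that pattern, assembled
// only from calls that appear elsewhere in this patch:
StorageRpcFactory rpcFactoryMock = EasyMock.createMock(StorageRpcFactory.class);
StorageRpc storageRpcMock = EasyMock.createMock(StorageRpc.class);
// The factory hands the mocked RPC layer to any service built from the options.
EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(StorageOptions.class)))
    .andReturn(storageRpcMock);
EasyMock.replay(rpcFactoryMock);
StorageOptions options = StorageOptions.builder()
    .projectId("projectId")
    .serviceRpcFactory(rpcFactoryMock) // injection point for the mock
    .retryParams(RetryParams.noRetries()) // keep mock expectations deterministic
    .build();
Storage storage = options.service(); // replaces StorageFactory.instance().get(optionsMock)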
EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(BUCKET_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BucketInfo bucket = storage.get(BUCKET_NAME1); assertEquals(BUCKET_INFO1, bucket); } @Test public void testGetBucketWithOptions() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_SOURCE_OPTIONS)) + EasyMock.expect(storageRpcMock.get(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_GET_OPTIONS)) .andReturn(BUCKET_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); - BucketInfo bucket = - storage.get(BUCKET_NAME1, - Storage.BucketSourceOption.metagenerationMatch(BUCKET_INFO1.metageneration())); + EasyMock.replay(storageRpcMock); + storage = options.service(); + BucketInfo bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION); assertEquals(BUCKET_INFO1, bucket); } + @Test + public void testGetBucketWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(storageRpcMock.get(EasyMock.eq(BucketInfo.of(BUCKET_NAME1).toPb()), + EasyMock.capture(capturedOptions))).andReturn(BUCKET_INFO1.toPb()); + EasyMock.replay(storageRpcMock); + storage = options.service(); + BucketInfo bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, BUCKET_GET_FIELDS); + assertEquals(BUCKET_GET_METAGENERATION.value(), + capturedOptions.getValue().get(BUCKET_GET_METAGENERATION.rpcOption())); + String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption()); + assertTrue(selector.contains("name")); + assertTrue(selector.contains("location")); + assertTrue(selector.contains("acl")); + assertEquals(17, selector.length()); + assertEquals(BUCKET_INFO1.name(), bucket.name()); + } + + @Test + public void testGetBucketWithEmptyFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(storageRpcMock.get(EasyMock.eq(BucketInfo.of(BUCKET_NAME1).toPb()), + EasyMock.capture(capturedOptions))).andReturn(BUCKET_INFO1.toPb()); + EasyMock.replay(storageRpcMock); + storage = options.service(); + BucketInfo bucket = storage.get(BUCKET_NAME1, BUCKET_GET_METAGENERATION, + BUCKET_GET_EMPTY_FIELDS); + assertEquals(BUCKET_GET_METAGENERATION.value(), + capturedOptions.getValue().get(BUCKET_GET_METAGENERATION.rpcOption())); + String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption()); + assertTrue(selector.contains("name")); + assertEquals(4, selector.length()); + assertEquals(BUCKET_INFO1.name(), bucket.name()); + } + @Test public void testGetBlob() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect( storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1); assertEquals(BLOB_INFO1, blob); } @Test public void testGetBlobWithOptions() { - 
EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
     EasyMock.expect(
-        storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS))
+        storageRpcMock.get(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_GET_OPTIONS))
+        .andReturn(BLOB_INFO1.toPb());
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    BlobInfo blob =
+        storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION, BLOB_GET_GENERATION);
+    assertEquals(BLOB_INFO1, blob);
+  }
+
+  @Test
+  public void testGetBlobWithOptionsFromBlobId() {
+    EasyMock.expect(
+        storageRpcMock.get(BLOB_INFO1.blobId().toPb(), BLOB_GET_OPTIONS))
         .andReturn(BLOB_INFO1.toPb());
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
     BlobInfo blob =
-        storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_METAGENERATION, BLOB_SOURCE_GENERATION);
+        storage.get(BLOB_INFO1.blobId(), BLOB_GET_METAGENERATION, BLOB_GET_GENERATION_FROM_BLOB_ID);
+    assertEquals(BLOB_INFO1, blob);
+  }
+
+  @Test
+  public void testGetBlobWithSelectedFields() {
+    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
+    EasyMock.expect(storageRpcMock.get(EasyMock.eq(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()),
+        EasyMock.capture(capturedOptions))).andReturn(BLOB_INFO1.toPb());
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION,
+        BLOB_GET_GENERATION, BLOB_GET_FIELDS);
+    assertEquals(BLOB_GET_METAGENERATION.value(),
+        capturedOptions.getValue().get(BLOB_GET_METAGENERATION.rpcOption()));
+    assertEquals(BLOB_GET_GENERATION.value(),
+        capturedOptions.getValue().get(BLOB_GET_GENERATION.rpcOption()));
+    String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption());
+    assertTrue(selector.contains("bucket"));
+    assertTrue(selector.contains("name"));
+    assertTrue(selector.contains("contentType"));
+    assertTrue(selector.contains("crc32c"));
+    assertEquals(30, selector.length());
+    assertEquals(BLOB_INFO1, blob);
+  }
+
+  @Test
+  public void testGetBlobWithEmptyFields() {
+    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
+    EasyMock.expect(storageRpcMock.get(EasyMock.eq(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb()),
+        EasyMock.capture(capturedOptions))).andReturn(BLOB_INFO1.toPb());
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    BlobInfo blob = storage.get(BUCKET_NAME1, BLOB_NAME1, BLOB_GET_METAGENERATION,
+        BLOB_GET_GENERATION, BLOB_GET_EMPTY_FIELDS);
+    assertEquals(BLOB_GET_METAGENERATION.value(),
+        capturedOptions.getValue().get(BLOB_GET_METAGENERATION.rpcOption()));
+    assertEquals(BLOB_GET_GENERATION.value(),
+        capturedOptions.getValue().get(BLOB_GET_GENERATION.rpcOption()));
+    String selector = (String) capturedOptions.getValue().get(BLOB_GET_FIELDS.rpcOption());
+    assertTrue(selector.contains("bucket"));
+    assertTrue(selector.contains("name"));
+    assertEquals(11, selector.length());
     assertEquals(BLOB_INFO1, blob);
   }

   @Test
   public void testListBuckets() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
     String cursor = "cursor";
     ImmutableList<BucketInfo> bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
     Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
         Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION));
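// testListBuckets and the list tests that follow exercise the ListResult-to-Page
// migration. A brief sketch of how a caller consumes the new Page API; the method
// names are exactly the ones the BLOB_PAGE stub in RemoteGcsHelperTest overrides,
// while the variable names here are illustrative only:
Page<BucketInfo> page = storage.list();
String nextCursor = page.nextPageCursor();    // token identifying the next page
for (BucketInfo info : page.values()) {       // values() exposes just the current page
  System.out.println(info.name());
}
Iterator<BucketInfo> all = page.iterateAll(); // intended to iterate across all pages
Page<BucketInfo> next = page.nextPage();      // or fetch the following page explicitly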
EasyMock.expect(storageRpcMock.list(EMPTY_RPC_OPTIONS)).andReturn(result);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    ListResult<BucketInfo> listResult = storage.list();
-    assertEquals(cursor, listResult.nextPageCursor());
-    assertArrayEquals(bucketList.toArray(), Iterables.toArray(listResult, BucketInfo.class));
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    Page<BucketInfo> page = storage.list();
+    assertEquals(cursor, page.nextPageCursor());
+    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class));
   }

   @Test
   public void testListBucketsEmpty() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
     EasyMock.expect(storageRpcMock.list(EMPTY_RPC_OPTIONS)).andReturn(
         Tuple.<String, Iterable<com.google.api.services.storage.model.Bucket>>of(null, null));
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    ListResult<BucketInfo> listResult = storage.list();
-    assertNull(listResult.nextPageCursor());
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    Page<BucketInfo> page = storage.list();
+    assertNull(page.nextPageCursor());
     assertArrayEquals(ImmutableList.of().toArray(),
-        Iterables.toArray(listResult, BucketInfo.class));
+        Iterables.toArray(page.values(), BucketInfo.class));
   }

   @Test
   public void testListBucketsWithOptions() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
     String cursor = "cursor";
     ImmutableList<BucketInfo> bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
     Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
         Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION));
     EasyMock.expect(storageRpcMock.list(BUCKET_LIST_OPTIONS)).andReturn(result);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    ListResult<BucketInfo> listResult = storage.list(BUCKET_LIST_MAX_RESULT, BUCKET_LIST_PREFIX);
-    assertEquals(cursor, listResult.nextPageCursor());
-    assertArrayEquals(bucketList.toArray(), Iterables.toArray(listResult, BucketInfo.class));
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    Page<BucketInfo> page = storage.list(BUCKET_LIST_MAX_RESULT, BUCKET_LIST_PREFIX);
+    assertEquals(cursor, page.nextPageCursor());
+    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class));
+  }
+
+  @Test
+  public void testListBucketsWithSelectedFields() {
+    String cursor = "cursor";
+    Capture<Map<StorageRpc.Option, Object>> capturedOptions = Capture.newInstance();
+    ImmutableList<BucketInfo> bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2);
+    Tuple<String, Iterable<com.google.api.services.storage.model.Bucket>> result =
+        Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION));
+    EasyMock.expect(storageRpcMock.list(EasyMock.capture(capturedOptions))).andReturn(result);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    Page<BucketInfo> page = storage.list(BUCKET_LIST_FIELDS);
+    String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption());
+    assertTrue(selector.contains("items"));
+    assertTrue(selector.contains("name"));
+    assertTrue(selector.contains("acl"));
+    assertTrue(selector.contains("location"));
+    assertEquals(24, selector.length());
+    assertEquals(cursor, page.nextPageCursor());
+    assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class));
+  }
+
+  @Test
+  public void testListBucketsWithEmptyFields() {
+    String cursor = "cursor";
+    Capture<Map<StorageRpc.Option, Object>>
capturedOptions = Capture.newInstance(); + ImmutableList bucketList = ImmutableList.of(BUCKET_INFO1, BUCKET_INFO2); + Tuple> result = + Tuple.of(cursor, Iterables.transform(bucketList, BucketInfo.TO_PB_FUNCTION)); + EasyMock.expect(storageRpcMock.list(EasyMock.capture(capturedOptions))).andReturn(result); + EasyMock.replay(storageRpcMock); + storage = options.service(); + Page page = storage.list(BUCKET_LIST_EMPTY_FIELDS); + String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption()); + assertTrue(selector.contains("items")); + assertTrue(selector.contains("name")); + assertEquals(11, selector.length()); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(bucketList.toArray(), Iterables.toArray(page.values(), BucketInfo.class)); } @Test public void testListBlobs() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); String cursor = "cursor"; ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, EMPTY_RPC_OPTIONS)).andReturn(result); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); - ListResult listResult = storage.list(BUCKET_NAME1); - assertEquals(cursor, listResult.nextPageCursor()); - assertArrayEquals(blobList.toArray(), Iterables.toArray(listResult, BlobInfo.class)); + EasyMock.replay(storageRpcMock); + storage = options.service(); + Page page = storage.list(BUCKET_NAME1); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); } @Test public void testListBlobsEmpty() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, EMPTY_RPC_OPTIONS)) - .andReturn( - Tuple.>of(null, - null)); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); - ListResult listResult = storage.list(BUCKET_NAME1); - assertNull(listResult.nextPageCursor()); - assertArrayEquals(ImmutableList.of().toArray(), Iterables.toArray(listResult, BlobInfo.class)); + .andReturn(Tuple.>of( + null, null)); + EasyMock.replay(storageRpcMock); + storage = options.service(); + Page page = storage.list(BUCKET_NAME1); + assertNull(page.nextPageCursor()); + assertArrayEquals(ImmutableList.of().toArray(), + Iterables.toArray(page.values(), BlobInfo.class)); } @Test public void testListBlobsWithOptions() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); String cursor = "cursor"; ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); Tuple> result = Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); EasyMock.expect(storageRpcMock.list(BUCKET_NAME1, BLOB_LIST_OPTIONS)).andReturn(result); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); - ListResult listResult = - storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX); - assertEquals(cursor, listResult.nextPageCursor()); - assertArrayEquals(blobList.toArray(), Iterables.toArray(listResult, BlobInfo.class)); + 
EasyMock.replay(storageRpcMock); + storage = options.service(); + Page page = storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); + } + + @Test + public void testListBlobsWithSelectedFields() { + String cursor = "cursor"; + Capture> capturedOptions = Capture.newInstance(); + ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); + Tuple> result = + Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); + EasyMock.expect( + storageRpcMock.list(EasyMock.eq(BUCKET_NAME1), EasyMock.capture(capturedOptions))) + .andReturn(result); + EasyMock.replay(storageRpcMock); + storage = options.service(); + Page page = + storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_FIELDS); + assertEquals(BLOB_LIST_MAX_RESULT.value(), + capturedOptions.getValue().get(BLOB_LIST_MAX_RESULT.rpcOption())); + assertEquals(BLOB_LIST_PREFIX.value(), + capturedOptions.getValue().get(BLOB_LIST_PREFIX.rpcOption())); + String selector = (String) capturedOptions.getValue().get(BLOB_LIST_FIELDS.rpcOption()); + assertTrue(selector.contains("items")); + assertTrue(selector.contains("bucket")); + assertTrue(selector.contains("name")); + assertTrue(selector.contains("contentType")); + assertTrue(selector.contains("md5Hash")); + assertEquals(38, selector.length()); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); + } + + @Test + public void testListBlobsWithEmptyFields() { + String cursor = "cursor"; + Capture> capturedOptions = Capture.newInstance(); + ImmutableList blobList = ImmutableList.of(BLOB_INFO1, BLOB_INFO2); + Tuple> result = + Tuple.of(cursor, Iterables.transform(blobList, BlobInfo.TO_PB_FUNCTION)); + EasyMock.expect( + storageRpcMock.list(EasyMock.eq(BUCKET_NAME1), EasyMock.capture(capturedOptions))) + .andReturn(result); + EasyMock.replay(storageRpcMock); + storage = options.service(); + Page page = + storage.list(BUCKET_NAME1, BLOB_LIST_MAX_RESULT, BLOB_LIST_PREFIX, BLOB_LIST_EMPTY_FIELDS); + assertEquals(BLOB_LIST_MAX_RESULT.value(), + capturedOptions.getValue().get(BLOB_LIST_MAX_RESULT.rpcOption())); + assertEquals(BLOB_LIST_PREFIX.value(), + capturedOptions.getValue().get(BLOB_LIST_PREFIX.rpcOption())); + String selector = (String) capturedOptions.getValue().get(BLOB_LIST_EMPTY_FIELDS.rpcOption()); + assertTrue(selector.contains("items")); + assertTrue(selector.contains("bucket")); + assertTrue(selector.contains("name")); + assertEquals(18, selector.length()); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(blobList.toArray(), Iterables.toArray(page.values(), BlobInfo.class)); } @Test public void testUpdateBucket() { BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().indexPage("some-page").build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.patch(updatedBucketInfo.toPb(), EMPTY_RPC_OPTIONS)) .andReturn(updatedBucketInfo.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BucketInfo bucket = storage.update(updatedBucketInfo); assertEquals(updatedBucketInfo, bucket); } @@ -509,12 +702,10 @@ public void testUpdateBucket() { @Test public 
void testUpdateBucketWithOptions() { BucketInfo updatedBucketInfo = BUCKET_INFO1.toBuilder().indexPage("some-page").build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.patch(updatedBucketInfo.toPb(), BUCKET_TARGET_OPTIONS)) .andReturn(updatedBucketInfo.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BucketInfo bucket = storage.update(updatedBucketInfo, BUCKET_TARGET_METAGENERATION, BUCKET_TARGET_PREDEFINED_ACL); @@ -524,12 +715,10 @@ public void testUpdateBucketWithOptions() { @Test public void testUpdateBlob() { BlobInfo updatedBlobInfo = BLOB_INFO1.toBuilder().contentType("some-content-type").build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.patch(updatedBlobInfo.toPb(), EMPTY_RPC_OPTIONS)) .andReturn(updatedBlobInfo.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.update(updatedBlobInfo); assertEquals(updatedBlobInfo, blob); } @@ -537,12 +726,10 @@ public void testUpdateBlob() { @Test public void testUpdateBlobWithOptions() { BlobInfo updatedBlobInfo = BLOB_INFO1.toBuilder().contentType("some-content-type").build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.patch(updatedBlobInfo.toPb(), BLOB_TARGET_OPTIONS_UPDATE)) .andReturn(updatedBlobInfo.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.update(updatedBlobInfo, BLOB_TARGET_METAGENERATION, BLOB_TARGET_PREDEFINED_ACL); assertEquals(updatedBlobInfo, blob); @@ -550,64 +737,65 @@ public void testUpdateBlobWithOptions() { @Test public void testDeleteBucket() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.delete(BucketInfo.of(BUCKET_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(true); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); assertTrue(storage.delete(BUCKET_NAME1)); } @Test public void testDeleteBucketWithOptions() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock .expect(storageRpcMock.delete(BucketInfo.of(BUCKET_NAME1).toPb(), BUCKET_SOURCE_OPTIONS)) .andReturn(true); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); assertTrue(storage.delete(BUCKET_NAME1, BUCKET_SOURCE_METAGENERATION)); } @Test public void testDeleteBlob() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect( 
storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS)) .andReturn(true); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); assertTrue(storage.delete(BUCKET_NAME1, BLOB_NAME1)); } @Test public void testDeleteBlobWithOptions() { - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect( storageRpcMock.delete(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS)) .andReturn(true); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); assertTrue(storage.delete(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_GENERATION, BLOB_SOURCE_METAGENERATION)); } + @Test + public void testDeleteBlobWithOptionsFromBlobId() { + EasyMock.expect( + storageRpcMock.delete(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS)) + .andReturn(true); + EasyMock.replay(storageRpcMock); + storage = options.service(); + assertTrue(storage.delete(BLOB_INFO1.blobId(), BLOB_SOURCE_GENERATION_FROM_BLOB_ID, + BLOB_SOURCE_METAGENERATION)); + } + @Test public void testCompose() { Storage.ComposeRequest req = Storage.ComposeRequest.builder() .addSource(BLOB_NAME2, BLOB_NAME3) .target(BLOB_INFO1) .build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.compose(ImmutableList.of(BLOB_INFO2.toPb(), BLOB_INFO3.toPb()), BLOB_INFO1.toPb(), EMPTY_RPC_OPTIONS)).andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.compose(req); assertEquals(BLOB_INFO1, blob); } @@ -619,78 +807,128 @@ public void testComposeWithOptions() { .target(BLOB_INFO1) .targetOptions(BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION) .build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); EasyMock.expect(storageRpcMock.compose(ImmutableList.of(BLOB_INFO2.toPb(), BLOB_INFO3.toPb()), BLOB_INFO1.toPb(), BLOB_TARGET_OPTIONS_COMPOSE)).andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); + EasyMock.replay(storageRpcMock); + storage = options.service(); BlobInfo blob = storage.compose(req); assertEquals(BLOB_INFO1, blob); } @Test public void testCopy() { - Storage.CopyRequest req = Storage.CopyRequest.builder() - .source(BUCKET_NAME1, BLOB_NAME2) - .target(BLOB_INFO1) - .build(); - EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock); - EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries()); - EasyMock.expect(storageRpcMock.copy(BLOB_INFO2.toPb(), EMPTY_RPC_OPTIONS, BLOB_INFO1.toPb(), - EMPTY_RPC_OPTIONS)).andReturn(BLOB_INFO1.toPb()); - EasyMock.replay(optionsMock, storageRpcMock); - storage = StorageFactory.instance().get(optionsMock); - BlobInfo blob = storage.copy(req); - assertEquals(BLOB_INFO1, blob); + CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId()); + StorageRpc.RewriteRequest rpcRequest = new 
   @Test
   public void testCopy() {
-    Storage.CopyRequest req = Storage.CopyRequest.builder()
-        .source(BUCKET_NAME1, BLOB_NAME2)
-        .target(BLOB_INFO1)
-        .build();
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
-    EasyMock.expect(storageRpcMock.copy(BLOB_INFO2.toPb(), EMPTY_RPC_OPTIONS, BLOB_INFO1.toPb(),
-        EMPTY_RPC_OPTIONS)).andReturn(BLOB_INFO1.toPb());
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BlobInfo blob = storage.copy(req);
-    assertEquals(BLOB_INFO1, blob);
+    CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId());
+    StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
+        EMPTY_RPC_OPTIONS, request.target().toPb(), EMPTY_RPC_OPTIONS, null);
+    StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
+        false, "token", 21L);
+    EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    CopyWriter writer = storage.copy(request);
+    assertEquals(42L, writer.blobSize());
+    assertEquals(21L, writer.totalBytesCopied());
+    assertTrue(!writer.isDone());
   }

   @Test
   public void testCopyWithOptions() {
-    Storage.CopyRequest req = Storage.CopyRequest.builder()
-        .source(BUCKET_NAME1, BLOB_NAME2)
+    CopyRequest request = Storage.CopyRequest.builder()
+        .source(BLOB_INFO2.blobId())
         .sourceOptions(BLOB_SOURCE_GENERATION, BLOB_SOURCE_METAGENERATION)
-        .target(BLOB_INFO1)
-        .targetOptions(BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
+        .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
         .build();
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
-    EasyMock.expect(
-        storageRpcMock.copy(BLOB_INFO2.toPb(), BLOB_SOURCE_OPTIONS_COPY, BLOB_INFO1.toPb(),
-            BLOB_TARGET_OPTIONS_COMPOSE)).andReturn(BLOB_INFO1.toPb());
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BlobInfo blob = storage.copy(req);
-    assertEquals(BLOB_INFO1, blob);
+    StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
+        BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
+    StorageRpc.RewriteResponse rpcResponse = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
+        false, "token", 21L);
+    EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    CopyWriter writer = storage.copy(request);
+    assertEquals(42L, writer.blobSize());
+    assertEquals(21L, writer.totalBytesCopied());
+    assertTrue(!writer.isDone());
+  }
+
+  @Test
+  public void testCopyWithOptionsFromBlobId() {
+    CopyRequest request = Storage.CopyRequest.builder()
+        .source(BLOB_INFO1.blobId())
+        .sourceOptions(BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION)
+        .target(BLOB_INFO1, BLOB_TARGET_GENERATION, BLOB_TARGET_METAGENERATION)
+        .build();
+    StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
+        BLOB_SOURCE_OPTIONS_COPY, request.target().toPb(), BLOB_TARGET_OPTIONS_COMPOSE, null);
+    StorageRpc.RewriteResponse rpcResponse =
+        new StorageRpc.RewriteResponse(rpcRequest, null, 42L, false, "token", 21L);
+    EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    CopyWriter writer = storage.copy(request);
+    assertEquals(42L, writer.blobSize());
+    assertEquals(21L, writer.totalBytesCopied());
+    assertTrue(!writer.isDone());
+  }
+
+  @Test
+  public void testCopyMultipleRequests() {
+    CopyRequest request = Storage.CopyRequest.of(BLOB_INFO1.blobId(), BLOB_INFO2.blobId());
+    StorageRpc.RewriteRequest rpcRequest = new StorageRpc.RewriteRequest(request.source().toPb(),
+        EMPTY_RPC_OPTIONS, request.target().toPb(), EMPTY_RPC_OPTIONS, null);
+    StorageRpc.RewriteResponse rpcResponse1 = new StorageRpc.RewriteResponse(rpcRequest, null, 42L,
+        false, "token", 21L);
+    StorageRpc.RewriteResponse rpcResponse2 = new StorageRpc.RewriteResponse(rpcRequest,
+        BLOB_INFO1.toPb(), 42L, true, "token", 42L);
+    EasyMock.expect(storageRpcMock.openRewrite(rpcRequest)).andReturn(rpcResponse1);
+    EasyMock.expect(storageRpcMock.continueRewrite(rpcResponse1)).andReturn(rpcResponse2);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    CopyWriter writer = storage.copy(request);
+    assertEquals(42L, writer.blobSize());
+    assertEquals(21L, writer.totalBytesCopied());
+    assertTrue(!writer.isDone());
+    assertEquals(BLOB_INFO1, writer.result());
+    assertTrue(writer.isDone());
+    assertEquals(42L, writer.totalBytesCopied());
+    assertEquals(42L, writer.blobSize());
   }
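The rewrite-based API above replaces the old single-shot copy: `storage.copy(...)` now returns a `CopyWriter` instead of a `BlobInfo`. A minimal sketch of the calling pattern these tests exercise, with hypothetical bucket and blob names:

```java
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.CopyWriter;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.Storage.CopyRequest;
import com.google.gcloud.storage.StorageOptions;

public class CopyExample {
  public static void main(String... args) {
    Storage storage = StorageOptions.defaultInstance().service();
    // Hypothetical names; CopyRequest.of(source, target) matches the tests above.
    CopyRequest request = CopyRequest.of(
        BlobId.of("my-bucket", "source-blob"), BlobId.of("my-bucket", "target-blob"));
    CopyWriter writer = storage.copy(request);
    // result() keeps issuing rewrite calls until the service reports the copy done,
    // which is exactly what testCopyMultipleRequests simulates with two chained responses.
    BlobInfo copied = writer.result();
    System.out.println("Copied " + writer.totalBytesCopied() + " of " + writer.blobSize()
        + " bytes to " + copied.name());
  }
}
```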
   @Test
   public void testReadAllBytes() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
     EasyMock.expect(
         storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), EMPTY_RPC_OPTIONS))
         .andReturn(BLOB_CONTENT);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
     byte[] readBytes = storage.readAllBytes(BUCKET_NAME1, BLOB_NAME1);
     assertArrayEquals(BLOB_CONTENT, readBytes);
   }

   @Test
   public void testReadAllBytesWithOptions() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
     EasyMock.expect(
         storageRpcMock.load(BlobId.of(BUCKET_NAME1, BLOB_NAME1).toPb(), BLOB_SOURCE_OPTIONS))
         .andReturn(BLOB_CONTENT);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
     byte[] readBytes = storage.readAllBytes(BUCKET_NAME1, BLOB_NAME1, BLOB_SOURCE_GENERATION,
         BLOB_SOURCE_METAGENERATION);
     assertArrayEquals(BLOB_CONTENT, readBytes);
   }

+  @Test
+  public void testReadAllBytesWithOptionsFromBlobId() {
+    EasyMock.expect(
+        storageRpcMock.load(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS))
+        .andReturn(BLOB_CONTENT);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    byte[] readBytes = storage.readAllBytes(BLOB_INFO1.blobId(),
+        BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
+    assertArrayEquals(BLOB_CONTENT, readBytes);
+  }
+
   @Test
   public void testApply() {
     BatchRequest req = BatchRequest.builder()
@@ -732,12 +970,12 @@ public Tuple apply(StorageObject f) {
     StorageRpc.BatchResponse res =
         new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);

-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
+
     Capture capturedBatchRequest = Capture.newInstance();
     EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BatchResponse batchResponse = storage.apply(req);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    BatchResponse batchResponse = storage.submit(req);

     // Verify captured StorageRpc.BatchRequest
     List>> capturedToDelete =
@@ -773,10 +1011,9 @@ public Tuple apply(StorageObject f) {

   @Test
   public void testReader() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME1);
     assertNotNull(channel);
     assertTrue(channel.isOpen());
   }
@@ -784,44 +1021,55 @@ public void testReader() {
   @Test
   public void testReaderWithOptions() throws IOException {
     byte[] result = new byte[DEFAULT_CHUNK_SIZE];
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.noRetries());
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
     EasyMock.expect(
         storageRpcMock.read(BLOB_INFO2.toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
-        .andReturn(result);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BlobReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION,
+        .andReturn(StorageRpc.Tuple.of("etag", result));
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    ReadChannel channel = storage.reader(BUCKET_NAME1, BLOB_NAME2, BLOB_SOURCE_GENERATION,
         BLOB_SOURCE_METAGENERATION);
     assertNotNull(channel);
     assertTrue(channel.isOpen());
     channel.read(ByteBuffer.allocate(42));
   }

+  @Test
+  public void testReaderWithOptionsFromBlobId() throws IOException {
+    byte[] result = new byte[DEFAULT_CHUNK_SIZE];
+    EasyMock.expect(
+        storageRpcMock.read(BLOB_INFO1.blobId().toPb(), BLOB_SOURCE_OPTIONS, 0, DEFAULT_CHUNK_SIZE))
+        .andReturn(StorageRpc.Tuple.of("etag", result));
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    ReadChannel channel = storage.reader(BLOB_INFO1.blobId(),
+        BLOB_SOURCE_GENERATION_FROM_BLOB_ID, BLOB_SOURCE_METAGENERATION);
+    assertNotNull(channel);
+    assertTrue(channel.isOpen());
+    channel.read(ByteBuffer.allocate(42));
+  }
+
   @Test
   public void testWriter() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
     BlobInfo.Builder infoBuilder = BLOB_INFO1.toBuilder();
     BlobInfo infoWithHashes = infoBuilder.md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build();
     BlobInfo infoWithoutHashes = infoBuilder.md5(null).crc32c(null).build();
     EasyMock.expect(storageRpcMock.open(infoWithoutHashes.toPb(), EMPTY_RPC_OPTIONS))
         .andReturn("upload-id");
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BlobWriteChannel channel = storage.writer(infoWithHashes);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    WriteChannel channel = storage.writer(infoWithHashes);
     assertNotNull(channel);
     assertTrue(channel.isOpen());
   }

   @Test
   public void testWriterWithOptions() {
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock).times(2);
     BlobInfo info = BLOB_INFO1.toBuilder().md5(CONTENT_MD5).crc32c(CONTENT_CRC32C).build();
     EasyMock.expect(storageRpcMock.open(info.toPb(), BLOB_TARGET_OPTIONS_CREATE))
         .andReturn("upload-id");
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
-    BlobWriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST,
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
+    WriteChannel channel = storage.writer(info, BLOB_WRITE_METAGENERATION, BLOB_WRITE_NOT_EXIST,
         BLOB_WRITE_PREDEFINED_ACL, BLOB_WRITE_CRC2C, BLOB_WRITE_MD5_HASH);
     assertNotNull(channel);
     assertTrue(channel.isOpen());
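The reader and writer tests reflect the rename of `BlobReadChannel`/`BlobWriteChannel` to the service-neutral `ReadChannel` and `WriteChannel`. A sketch of both channels in use, assuming the interfaces live in the top-level `com.google.gcloud` package and that `BlobInfo.builder(BlobId)` is available; bucket and blob names are hypothetical:

```java
import com.google.gcloud.ReadChannel;
import com.google.gcloud.WriteChannel;
import com.google.gcloud.storage.BlobId;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ChannelExample {
  public static void main(String... args) throws IOException {
    Storage storage = StorageOptions.defaultInstance().service();
    BlobId blobId = BlobId.of("my-bucket", "my-blob"); // hypothetical names
    // WriteChannel replaces BlobWriteChannel for resumable uploads.
    try (WriteChannel writer = storage.writer(BlobInfo.builder(blobId).build())) {
      writer.write(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
    }
    // ReadChannel replaces BlobReadChannel for chunked downloads.
    try (ReadChannel reader = storage.reader(blobId)) {
      ByteBuffer bytes = ByteBuffer.allocate(64);
      reader.read(bytes);
    }
  }
}
```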
@@ -830,21 +1078,15 @@ public void testWriterWithOptions() {
   @Test
   public void testSignUrl() throws NoSuchAlgorithmException, InvalidKeyException,
       SignatureException, UnsupportedEncodingException {
-    String account = "account";
-    ServiceAccountAuthCredentials credentialsMock =
-        EasyMock.createMock(ServiceAccountAuthCredentials.class);
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.authCredentials()).andReturn(credentialsMock).times(2);
-    EasyMock.expect(optionsMock.clock()).andReturn(TIME_SOURCE);
-    EasyMock.expect(credentialsMock.privateKey()).andReturn(privateKey);
-    EasyMock.expect(credentialsMock.account()).andReturn(account);
-    EasyMock.replay(optionsMock, storageRpcMock, credentialsMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    ServiceAccountAuthCredentials authCredentials =
+        ServiceAccountAuthCredentials.createFor(ACCOUNT, privateKey);
+    storage = options.toBuilder().authCredentials(authCredentials).build().service();
     URL url = storage.signUrl(BLOB_INFO1, 14, TimeUnit.DAYS);
     String stringUrl = url.toString();
     String expectedUrl =
         new StringBuilder("https://storage.googleapis.com/").append(BUCKET_NAME1).append("/")
-            .append(BLOB_NAME1).append("?GoogleAccessId=").append(account).append("&Expires=")
+            .append(BLOB_NAME1).append("?GoogleAccessId=").append(ACCOUNT).append("&Expires=")
             .append(42L + 1209600).append("&Signature=").toString();
     assertTrue(stringUrl.startsWith(expectedUrl));
     String signature = stringUrl.substring(expectedUrl.length());
@@ -859,22 +1101,15 @@ public void testSignUrl() throws NoSuchAlgorithmException, InvalidKeyException,
     signer.update(signedMessageBuilder.toString().getBytes(UTF_8));
     assertTrue(signer.verify(BaseEncoding.base64().decode(
         URLDecoder.decode(signature, UTF_8.name()))));
-    EasyMock.verify(credentialsMock);
   }

   @Test
   public void testSignUrlWithOptions() throws NoSuchAlgorithmException, InvalidKeyException,
       SignatureException, UnsupportedEncodingException {
-    String account = "account";
-    ServiceAccountAuthCredentials credentialsMock =
-        EasyMock.createMock(ServiceAccountAuthCredentials.class);
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.authCredentials()).andReturn(credentialsMock).times(2);
-    EasyMock.expect(optionsMock.clock()).andReturn(TIME_SOURCE);
-    EasyMock.expect(credentialsMock.privateKey()).andReturn(privateKey);
-    EasyMock.expect(credentialsMock.account()).andReturn(account);
-    EasyMock.replay(optionsMock, storageRpcMock, credentialsMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    ServiceAccountAuthCredentials authCredentials =
+        ServiceAccountAuthCredentials.createFor(ACCOUNT, privateKey);
+    storage = options.toBuilder().authCredentials(authCredentials).build().service();
     URL url = storage.signUrl(BLOB_INFO1, 14, TimeUnit.DAYS,
         Storage.SignUrlOption.httpMethod(HttpMethod.POST),
@@ -882,7 +1117,7 @@ public void testSignUrlWithOptions() throws NoSuchAlgorithmException, InvalidKey
     String stringUrl = url.toString();
     String expectedUrl =
         new StringBuilder("https://storage.googleapis.com/").append(BUCKET_NAME1).append("/")
-            .append(BLOB_NAME1).append("?GoogleAccessId=").append(account).append("&Expires=")
+            .append(BLOB_NAME1).append("?GoogleAccessId=").append(ACCOUNT).append("&Expires=")
             .append(42L + 1209600).append("&Signature=").toString();
     assertTrue(stringUrl.startsWith(expectedUrl));
     String signature = stringUrl.substring(expectedUrl.length());
@@ -897,7 +1132,6 @@ public void testSignUrlWithOptions() throws NoSuchAlgorithmException, InvalidKey
     signer.update(signedMessageBuilder.toString().getBytes(UTF_8));
     assertTrue(signer.verify(BaseEncoding.base64().decode(
         URLDecoder.decode(signature, UTF_8.name()))));
-    EasyMock.verify(credentialsMock);
   }
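Signed URLs no longer pull credentials off a mocked options object; `ServiceAccountAuthCredentials.createFor(account, privateKey)` is supplied when the options are built. A sketch, under the assumption that `ServiceAccountAuthCredentials` is the nested class of `com.google.gcloud.AuthCredentials`; the account, project ID, blob names, and the key-loading helper are all hypothetical:

```java
import com.google.gcloud.AuthCredentials.ServiceAccountAuthCredentials;
import com.google.gcloud.storage.BlobInfo;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

import java.net.URL;
import java.security.PrivateKey;
import java.util.concurrent.TimeUnit;

public class SignUrlExample {

  public static void main(String... args) {
    // createFor(account, privateKey) is the factory the updated tests use.
    ServiceAccountAuthCredentials credentials =
        ServiceAccountAuthCredentials.createFor("my-account@example.com", loadPrivateKey());
    Storage storage = StorageOptions.builder()
        .projectId("my-project") // hypothetical project ID
        .authCredentials(credentials)
        .build()
        .service();
    // Signed URL valid for 14 days, as in testSignUrl.
    URL url = storage.signUrl(BlobInfo.builder("my-bucket", "my-blob").build(), 14, TimeUnit.DAYS);
    System.out.println(url);
  }

  private static PrivateKey loadPrivateKey() {
    // Hypothetical helper: load the service account's private key from your key file.
    throw new UnsupportedOperationException("replace with real key-loading code");
  }
}
```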
@@ -920,11 +1154,11 @@ public Tuple apply(StorageObject f) {
     StorageRpc.BatchResponse res =
         new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);

-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
+
     Capture capturedBatchRequest = Capture.newInstance();
     EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
     List resultBlobs = storage.get(blobId1, blobId2);

     // Verify captured StorageRpc.BatchRequest
@@ -963,11 +1197,11 @@ public Tuple apply(StorageObject f) {
     StorageRpc.BatchResponse res =
         new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);

-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
+
     Capture capturedBatchRequest = Capture.newInstance();
     EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
     List resultBlobs = storage.update(blobInfo1, blobInfo2);

     // Verify captured StorageRpc.BatchRequest
@@ -1006,11 +1240,10 @@ public Tuple apply(StorageObject f) {
     StorageRpc.BatchResponse res =
         new StorageRpc.BatchResponse(deleteResult, updateResult, getResult);

-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
     Capture capturedBatchRequest = Capture.newInstance();
     EasyMock.expect(storageRpcMock.batch(EasyMock.capture(capturedBatchRequest))).andReturn(res);
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.service();
     List deleteResults = storage.delete(blobInfo1.blobId(), blobInfo2.blobId());

     // Verify captured StorageRpc.BatchRequest
@@ -1032,13 +1265,11 @@ public Tuple apply(StorageObject f) {
   @Test
   public void testRetryableException() {
     BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1);
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.getDefaultInstance());
     EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS))
-        .andThrow(new StorageException(500, "InternalError", true))
+        .andThrow(new StorageException(500, "internalError"))
         .andReturn(BLOB_INFO1.toPb());
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service();
     BlobInfo readBlob = storage.get(blob);
     assertEquals(BLOB_INFO1, readBlob);
   }
@@ -1047,12 +1278,10 @@ public void testRetryableException() {
   @Test
   public void testNonRetryableException() {
     BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1);
     String exceptionMessage = "Not Implemented";
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.getDefaultInstance());
     EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS))
-        .andThrow(new StorageException(501, exceptionMessage, false));
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+        .andThrow(new StorageException(501, exceptionMessage));
+    EasyMock.replay(storageRpcMock);
+    storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service();
     thrown.expect(StorageException.class);
     thrown.expectMessage(exceptionMessage);
     storage.get(blob);
@@ -1062,12 +1291,10 @@ public void testNonRetryableException() {
   @Test
   public void testRuntimeException() {
     BlobId blob = BlobId.of(BUCKET_NAME1, BLOB_NAME1);
     String exceptionMessage = "Artificial runtime exception";
-    EasyMock.expect(optionsMock.storageRpc()).andReturn(storageRpcMock);
-    EasyMock.expect(optionsMock.retryParams()).andReturn(RetryParams.getDefaultInstance());
     EasyMock.expect(storageRpcMock.get(blob.toPb(), EMPTY_RPC_OPTIONS))
         .andThrow(new RuntimeException(exceptionMessage));
-    EasyMock.replay(optionsMock, storageRpcMock);
-    storage = StorageFactory.instance().get(optionsMock);
+    EasyMock.replay(storageRpcMock);
+    storage = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service();
     thrown.expect(StorageException.class);
     thrown.expectMessage(exceptionMessage);
     storage.get(blob);
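The retry tests capture two related changes: `RetryParams.getDefaultInstance()` becomes `RetryParams.defaultInstance()`, and `StorageException` no longer takes an explicit retryable flag; whether an error is retried is inferred from the code and reason (for example, 500 "internalError" is retried while 501 "Not Implemented" propagates immediately). A minimal sketch of opting a service instance into the default retry policy, mirroring the `options.toBuilder()` call in the tests:

```java
import com.google.gcloud.RetryParams;
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class RetryExample {
  public static void main(String... args) {
    // RetryParams.defaultInstance() replaces RetryParams.getDefaultInstance(); transient
    // failures such as StorageException(500, "internalError") are retried automatically.
    Storage storage = StorageOptions.defaultInstance().toBuilder()
        .retryParams(RetryParams.defaultInstance())
        .build()
        .service();
    System.out.println("Storage service configured with default retry params: " + storage);
  }
}
```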
diff --git a/gcloud-java/README.md b/gcloud-java/README.md
index 7e2eee84a8c4..a080d787d6ab 100644
--- a/gcloud-java/README.md
+++ b/gcloud-java/README.md
@@ -20,14 +20,27 @@ This client supports the following Google Cloud Platform services:
 Quickstart
 ----------
-Add this to your pom.xml file
+If you are using Maven, add this to your pom.xml file
 ```xml
 <dependency>
   <groupId>com.google.gcloud</groupId>
   <artifactId>gcloud-java</artifactId>
-  <version>0.0.10</version>
+  <version>0.1.3</version>
 </dependency>
 ```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.gcloud:gcloud-java:0.1.3'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.gcloud" % "gcloud-java" % "0.1.3"
+```
+
+Troubleshooting
+---------------
+
+To get help, follow the `gcloud-java` links in the `gcloud-*` [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).

 Java Versions
 -------------
@@ -48,7 +61,9 @@ Contributing
 Contributions to this library are always welcome and highly encouraged.

-See [CONTRIBUTING] for more information on how to get started.
+See `gcloud-java`'s [CONTRIBUTING] documentation and the `gcloud-*` [shared documentation](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/contributing/readme.md#how-to-contribute-to-gcloud) for more information on how to get started.
+
+Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.

 License
 -------
@@ -57,6 +72,7 @@ Apache 2.0 - See [LICENSE] for more information.

 [CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md
+[code-of-conduct]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct
 [LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE
 [cloud-platform]: https://cloud.google.com/
 [cloud-datastore]: https://cloud.google.com/datastore/docs
diff --git a/gcloud-java/pom.xml b/gcloud-java/pom.xml
index 6ad0fce8f1b1..60e98fa63396 100644
--- a/gcloud-java/pom.xml
+++ b/gcloud-java/pom.xml
@@ -11,9 +11,14 @@
     <groupId>com.google.gcloud</groupId>
     <artifactId>gcloud-java-pom</artifactId>
-    <version>0.0.11-SNAPSHOT</version>
+    <version>0.1.4-SNAPSHOT</version>
   </parent>
   <dependencies>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>gcloud-java-bigquery</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>gcloud-java-core</artifactId>
@@ -24,6 +29,11 @@
       <artifactId>gcloud-java-datastore</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>${project.groupId}</groupId>
+      <artifactId>gcloud-java-resourcemanager</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>gcloud-java-storage</artifactId>
diff --git a/pom.xml b/pom.xml
index 9672c02a8ca7..2d1bfb1e0a74 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
   <groupId>com.google.gcloud</groupId>
   <artifactId>gcloud-java-pom</artifactId>
   <packaging>pom</packaging>
-  <version>0.0.11-SNAPSHOT</version>
+  <version>0.1.4-SNAPSHOT</version>
   <name>GCloud Java</name>
   <url>https://github.com/GoogleCloudPlatform/gcloud-java</url>
@@ -63,13 +63,17 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
     <github.global.server>github</github.global.server>
+    <site.installationModule>gcloud-java</site.installationModule>
   </properties>
   <modules>
+    <module>gcloud-java</module>
+    <module>gcloud-java-bigquery</module>
+    <module>gcloud-java-contrib</module>
     <module>gcloud-java-core</module>
     <module>gcloud-java-datastore</module>
-    <module>gcloud-java-storage</module>
-    <module>gcloud-java</module>
     <module>gcloud-java-examples</module>
+    <module>gcloud-java-resourcemanager</module>
+    <module>gcloud-java-storage</module>
   </modules>
@@ -332,6 +336,8 @@
       true
       true
      true
+      ${site.installationModule}
+      jar
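The dependency changes above fold the Alpha modules gcloud-java-bigquery and gcloud-java-resourcemanager into the gcloud-java umbrella artifact. A sketch of bootstrapping both services, assuming `BigQueryOptions` and `ResourceManagerOptions` expose the same `defaultInstance().service()` factory this diff establishes for Datastore and Storage:

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.resourcemanager.ResourceManager;
import com.google.gcloud.resourcemanager.ResourceManagerOptions;

public class AlphaServicesExample {
  public static void main(String... args) {
    // Assumed: both Alpha services follow the defaultInstance().service() pattern
    // used for Datastore and Storage elsewhere in this diff.
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    ResourceManager resourceManager = ResourceManagerOptions.defaultInstance().service();
    System.out.println(bigquery + " / " + resourceManager);
  }
}
```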

diff --git a/src/site/resources/index.html b/src/site/resources/index.html
index 4daf4fa596f4..8f40cfbcd97e 100644
--- a/src/site/resources/index.html
+++ b/src/site/resources/index.html
@@ -118,15 +118,15 @@
       What is it?

       integrates better with your codebase. All this means you spend more time creating code that matters to you.

-
+

       gcloud is configured to access Google Cloud Platform services and authorize (OAuth 2.0)
       automatically on your behalf. Add the gcloud dependency to your project and get a private key to be
-      up and ready to go. Better yet, if you are running on a Google
-      Compute Engine instance, the private key is automatically detected.
+      up and ready to go. Better yet, if you are running on Google
+      App Engine or Compute Engine, the private key is automatically detected.
-
+

       Example: Retrieve Datastore Entries

@@ -136,7 +136,6 @@
       Example: Retrieve Datastore Entries

 import com.google.gcloud.datastore.Datastore;
-import com.google.gcloud.datastore.DatastoreFactory;
 import com.google.gcloud.datastore.DatastoreOptions;
 import com.google.gcloud.datastore.Entity;
 import com.google.gcloud.datastore.Key;
@@ -144,8 +143,7 @@
       Example: Retrieve Datastore Entries

 // Authentication is automatic inside Google Compute Engine
 // and Google App Engine.
-DatastoreOptions options = DatastoreOptions.builder().build();
-Datastore datastore = DatastoreFactory.instance().get(options);
+Datastore datastore = DatastoreOptions.defaultInstance().service();
 KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND);
 Key key = keyFactory.newKey(keyName);
 Entity entity = datastore.get(key);
@@ -156,7 +154,6 @@
       Example: Retrieve Datastore Entries

 import com.google.gcloud.AuthCredentials;
 import com.google.gcloud.datastore.Datastore;
-import com.google.gcloud.datastore.DatastoreFactory;
 import com.google.gcloud.datastore.DatastoreOptions;
 import com.google.gcloud.datastore.Entity;
 import com.google.gcloud.datastore.Key;
@@ -166,7 +163,7 @@
       Example: Retrieve Datastore Entries

     .projectId(PROJECT_ID)
     .authCredentials(AuthCredentials.createForJson(
         new FileInputStream(PATH_TO_JSON_KEY))).build();
-Datastore datastore = DatastoreFactory.instance().get(options);
+Datastore datastore = options.service();
 KeyFactory keyFactory = datastore.newKeyFactory().kind(KIND);
 Key key = keyFactory.newKey(keyName);
 Entity entity = datastore.get(key);
@@ -175,6 +172,18 @@
       Example: Retrieve Datastore Entries

+
+
+      Examples
+
+      • SparkJava demo - Use gcloud-java with App Engine Managed VMs, Datastore, and SparkJava.
+
+
+
       FAQ
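Assembling the site's updated fragments into one runnable sketch of the new Datastore bootstrap; `KIND`, `keyName`, and the project setup are placeholders, and the `KeyFactory` import is assumed to sit alongside the others in `com.google.gcloud.datastore`:

```java
import com.google.gcloud.datastore.Datastore;
import com.google.gcloud.datastore.DatastoreOptions;
import com.google.gcloud.datastore.Entity;
import com.google.gcloud.datastore.Key;
import com.google.gcloud.datastore.KeyFactory;

public class DatastoreSnippet {
  public static void main(String... args) {
    // Authentication is automatic inside Google Compute Engine and Google App Engine;
    // the removed DatastoreFactory.instance().get(options) indirection is no longer needed.
    Datastore datastore = DatastoreOptions.defaultInstance().service();
    KeyFactory keyFactory = datastore.newKeyFactory().kind("my-kind"); // placeholder for KIND
    Key key = keyFactory.newKey("my-key-name");                        // placeholder for keyName
    Entity entity = datastore.get(key);
    System.out.println(entity);
  }
}
```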

diff --git a/utilities/after_success.sh b/utilities/after_success.sh
index 26405bcd9db3..be7484806c46 100755
--- a/utilities/after_success.sh
+++ b/utilities/after_success.sh
@@ -1,5 +1,4 @@
 #!/bin/bash
-source ./utilities/integration_test_env.sh

 # This script is used by Travis-CI to publish artifacts (binary, source and javadoc jars) when releasing snapshots.
 # This script is referenced in .travis.yml.
@@ -7,35 +6,40 @@ source ./utilities/integration_test_env.sh
 echo "Travis branch: " ${TRAVIS_BRANCH}
 echo "Travis pull request: " ${TRAVIS_PULL_REQUEST}
 echo "Travis JDK version: " ${TRAVIS_JDK_VERSION}
-if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" -a "${TRAVIS_BRANCH}" == "master" -a "${TRAVIS_PULL_REQUEST}" == "false" ]; then
-  mvn cobertura:cobertura coveralls:report
-  SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)')
-  if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then
-    # Deploy site if not a SNAPSHOT
-    git config --global user.name "travis-ci"
-    git config --global user.email "travis@travis-ci.org"
-    git clone --branch gh-pages --single-branch https://github.com/GoogleCloudPlatform/gcloud-java/ tmp_gh-pages
-    mkdir -p tmp_gh-pages/$SITE_VERSION
-    mvn site -DskipTests=true
-    mvn site:stage -DtopSiteURL=http://googlecloudplatform.github.io/gcloud-java/site/${SITE_VERSION}/
-    cd tmp_gh-pages
-    cp -r ../target/staging/$SITE_VERSION/* $SITE_VERSION/
-    sed -i "s/{{SITE_VERSION}}/$SITE_VERSION/g" ${SITE_VERSION}/index.html # Update "Quickstart with Maven" to reflect version change
-    git add $SITE_VERSION
-    echo "" > index.html
-    git add index.html
-    echo "" > apidocs/index.html
-    git add apidocs/index.html
-    git commit -m "Added a new site for version $SITE_VERSION and updated the root directory's redirect."
-    git config --global push.default simple
-    git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" > /dev/null 2>&1
+if [ "${TRAVIS_JDK_VERSION}" == "oraclejdk7" ]; then
+  mvn clean cobertura:cobertura coveralls:report
+  if [ "${TRAVIS_PULL_REQUEST}" == "false" -a "${TRAVIS_BRANCH}" == "master" ]; then
+    source ./utilities/integration_test_env.sh
+    SITE_VERSION=$(mvn org.apache.maven.plugins:maven-help-plugin:2.1.1:evaluate -Dexpression=project.version | grep -Ev '(^\[|\w+:)')
+    if [ "${SITE_VERSION##*-}" != "SNAPSHOT" ]; then
+      # Deploy site if not a SNAPSHOT
+      git config --global user.name "travis-ci"
+      git config --global user.email "travis@travis-ci.org"
+      git clone --branch gh-pages --single-branch https://github.com/GoogleCloudPlatform/gcloud-java/ tmp_gh-pages
+      mkdir -p tmp_gh-pages/$SITE_VERSION
+      mvn site -DskipTests=true
+      mvn site:stage -DtopSiteURL=http://googlecloudplatform.github.io/gcloud-java/site/${SITE_VERSION}/
+      cd tmp_gh-pages
+      cp -r ../target/staging/$SITE_VERSION/* $SITE_VERSION/
+      sed -i "s/{{SITE_VERSION}}/$SITE_VERSION/g" ${SITE_VERSION}/index.html # Update "Quickstart with Maven" to reflect version change
+      git add $SITE_VERSION
+      echo "" > index.html
+      git add index.html
+      echo "" > apidocs/index.html
+      git add apidocs/index.html
+      git commit -m "Added a new site for version $SITE_VERSION and updated the root directory's redirect."
+      git config --global push.default simple
+      git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" > /dev/null 2>&1
-    cd ..
-    utilities/update_docs_version.sh # Update version in READMEs
-    mvn clean deploy --settings ~/.m2/settings.xml -P sign-deploy
+      cd ..
+      utilities/update_docs_version.sh # Update version in READMEs
+      mvn clean deploy --settings ~/.m2/settings.xml -P sign-deploy
+    else
+      mvn clean deploy -DskipTests=true -Dgpg.skip=true --settings ~/.m2/settings.xml
+    fi
   else
-  mvn clean deploy -DskipTests=true -Dgpg.skip=true --settings ~/.m2/settings.xml
+    echo "Not deploying artifacts. This is only done with non-pull-request commits to master branch with Oracle Java 7 builds."
   fi
 else
   echo "Not deploying artifacts. This is only done with non-pull-request commits to master branch with Oracle Java 7 builds."
diff --git a/utilities/integration_test_env.sh b/utilities/integration_test_env.sh
index f7aca1a8a623..a1bebe4dcb69 100755
--- a/utilities/integration_test_env.sh
+++ b/utilities/integration_test_env.sh
@@ -1,3 +1,3 @@
 # Export test env variables
-export GCLOUD_TESTS_PROJECT_ID="gcloud-devel"
-export GCLOUD_TESTS_KEY=$TRAVIS_BUILD_DIR/signing-tools/gcloud-devel-travis.json
+export GCLOUD_PROJECT="gcloud-devel"
+export GOOGLE_APPLICATION_CREDENTIALS=$TRAVIS_BUILD_DIR/signing-tools/gcloud-devel-travis.json
diff --git a/utilities/update_docs_version.sh b/utilities/update_docs_version.sh
index d7e7bdbfb830..4fc0aa772963 100755
--- a/utilities/update_docs_version.sh
+++ b/utilities/update_docs_version.sh
@@ -14,11 +14,13 @@ if [ "${RELEASED_VERSION##*-}" != "SNAPSHOT" ]; then
   for item in ${module_folders[*]}
   do
     sed -ri "s/<version>[0-9]+\.[0-9]+\.[0-9]+<\/version>/<version>${RELEASED_VERSION}<\/version>/g" ${item}/README.md
+    sed -ri "s/:[0-9]+\.[0-9]+\.[0-9]+'/:${RELEASED_VERSION}'/g" ${item}/README.md
+    sed -ri "s/\"[0-9]+\.[0-9]+\.[0-9]+\"/\"${RELEASED_VERSION}\"/g" ${item}/README.md
   done

   git add README.md */README.md
   git config --global user.name "travis-ci"
   git config --global user.email "travis@travis-ci.org"
-  git commit -m "Updating version in README files."
+  git commit -m "Updating version in README files. [ci skip]"
   git push --quiet "https://${CI_DEPLOY_USERNAME}:${CI_DEPLOY_PASSWORD}@github.com/GoogleCloudPlatform/gcloud-java.git" HEAD:master > /dev/null 2>&1
 fi
diff --git a/utilities/verify.sh b/utilities/verify.sh
index 463180415e98..b29ab8d8f747 100755
--- a/utilities/verify.sh
+++ b/utilities/verify.sh
@@ -1,10 +1,9 @@
 #!/bin/bash
-source ./utilities/integration_test_env.sh
-
 # This script is used by Travis-CI to run tests.
 # This script is referenced in .travis.yml.

-if [ "${TRAVIS_BRANCH}" == "master" -a "${TRAVIS_PULL_REQUEST}" == "false" ]; then
+if [ "${TRAVIS_PULL_REQUEST}" == "false" ]; then
+  source ./utilities/integration_test_env.sh
   # Get signing tools and API keyfile
   openssl aes-256-cbc -K $encrypted_631490ecae8f_key -iv $encrypted_631490ecae8f_iv -in target/travis/signing-tools.tar.enc -out $TRAVIS_BUILD_DIR/signing-tools.tar -d
   mkdir $TRAVIS_BUILD_DIR/signing-tools
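The switch to `GCLOUD_PROJECT` and `GOOGLE_APPLICATION_CREDENTIALS` means the project ID and credentials are now inferred through the standard environment rather than gcloud-specific variables. A sketch of code relying on that inference, assuming the options object exposes a `projectId()` getter:

```java
import com.google.gcloud.storage.Storage;
import com.google.gcloud.storage.StorageOptions;

public class DefaultCredentialsExample {
  public static void main(String... args) {
    // With GCLOUD_PROJECT and GOOGLE_APPLICATION_CREDENTIALS exported, as
    // utilities/integration_test_env.sh now does, no explicit projectId or
    // authCredentials configuration should be needed here.
    StorageOptions options = StorageOptions.defaultInstance();
    System.out.println("Project resolved from environment: " + options.projectId());
    Storage storage = options.service();
  }
}
```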