diff --git a/README.md b/README.md index 9afe5182070d..7a2e3afc8a47 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,7 @@ This client supports the following Google Cloud Platform services: - [Google Cloud Datastore] (#google-cloud-datastore) - [Google Cloud Storage] (#google-cloud-storage) - [Google Cloud Resource Manager] (#google-cloud-resource-manager) +- [Google Cloud BigQuery] (#google-cloud-bigquery) > Note: This client is a work-in-progress, and may occasionally > make backwards-incompatible changes. @@ -214,6 +215,51 @@ while (projectIterator.hasNext()) { } ``` +Google Cloud BigQuery +---------------------- + +- [API Documentation][bigquery-api] +- [Official Documentation][cloud-bigquery-docs] + +#### Preview + +Here is a code snippet showing a simple usage example from within Compute/App Engine. Note that you +must [supply credentials](#authentication) and a project ID if running this snippet elsewhere. + +```java +import com.google.gcloud.bigquery.BaseTableInfo; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.JobStatus; +import com.google.gcloud.bigquery.LoadJobInfo; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; + +BigQuery bigquery = BigQueryOptions.defaultInstance().service(); +TableId tableId = TableId.of("dataset", "table"); +BaseTableInfo info = bigquery.getTable(tableId); +if (info == null) { + System.out.println("Creating table " + tableId); + Field integerField = Field.of("fieldName", Field.Type.integer()); + bigquery.create(TableInfo.of(tableId, Schema.of(integerField))); +} else { + System.out.println("Loading data into table " + tableId); + LoadJobInfo loadJob = LoadJobInfo.of(tableId, "gs://bucket/path"); + loadJob = bigquery.create(loadJob); + while (loadJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000L); + 
loadJob = bigquery.getJob(loadJob.jobId()); + } + if (loadJob.status().error() != null) { + System.out.println("Job completed with errors"); + } else { + System.out.println("Job succeeded"); + } +} +``` + Troubleshooting --------------- @@ -276,3 +322,7 @@ Apache 2.0 - See [LICENSE] for more information. [resourcemanager-api]:http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/resourcemanager/package-summary.html [cloud-resourcemanager-docs]:https://cloud.google.com/resource-manager/ + +[cloud-bigquery]: https://cloud.google.com/bigquery/ +[cloud-bigquery-docs]: https://cloud.google.com/bigquery/docs/overview +[bigquery-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html \ No newline at end of file diff --git a/TESTING.md b/TESTING.md index d6c045eefb1e..3ad181310b17 100644 --- a/TESTING.md +++ b/TESTING.md @@ -5,6 +5,7 @@ This library provides tools to help write tests for code that uses the following - [Datastore] (#testing-code-that-uses-datastore) - [Storage] (#testing-code-that-uses-storage) - [Resource Manager] (#testing-code-that-uses-resource-manager) +- [BigQuery] (#testing-code-that-uses-bigquery) ### Testing code that uses Datastore @@ -103,5 +104,34 @@ You can test against a temporary local Resource Manager by following these steps This method will block until the server thread has been terminated. +### Testing code that uses BigQuery + +Currently, there isn't an emulator for Google BigQuery, so an alternative is to create a test +project. `RemoteBigQueryHelper` contains convenience methods to make setting up and cleaning up the +test project easier. To use this class, follow the steps below: + +1. Create a test Google Cloud project. + +2. Download a [JSON service account credentials file][create-service-account] from the Google +Developer's Console. + +3. Create a `RemoteBigQueryHelper` object using your project ID and JSON key. 
+Here is an example that uses the `RemoteBigQueryHelper` to create a dataset. + ```java + RemoteBigQueryHelper bigqueryHelper = + RemoteBigQueryHelper.create(PROJECT_ID, new FileInputStream("/path/to/my/JSON/key.json")); + BigQuery bigquery = bigqueryHelper.options().service(); + String dataset = RemoteBigQueryHelper.generateDatasetName(); + bigquery.create(DatasetInfo.builder(dataset).build()); + ``` + +4. Run your tests. + +5. Clean up the test project by using `forceDelete` to clear any datasets used. +Here is an example that clears the dataset created in Step 3. + ```java + RemoteBigQueryHelper.forceDelete(bigquery, dataset); + ``` [cloud-platform-storage-authentication]:https://cloud.google.com/storage/docs/authentication?hl=en#service_accounts +[create-service-account]:https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount \ No newline at end of file diff --git a/gcloud-java-bigquery/README.md b/gcloud-java-bigquery/README.md new file mode 100644 index 000000000000..077e4fbc332a --- /dev/null +++ b/gcloud-java-bigquery/README.md @@ -0,0 +1,322 @@ +Google Cloud Java Client for BigQuery +==================================== + +Java idiomatic client for [Google Cloud BigQuery] (https://cloud.google.com/bigquery). + +[![Build Status](https://travis-ci.org/GoogleCloudPlatform/gcloud-java.svg?branch=master)](https://travis-ci.org/GoogleCloudPlatform/gcloud-java) +[![Coverage Status](https://coveralls.io/repos/GoogleCloudPlatform/gcloud-java/badge.svg?branch=master)](https://coveralls.io/r/GoogleCloudPlatform/gcloud-java?branch=master) + + +- [Homepage] (https://googlecloudplatform.github.io/gcloud-java/) +- [API Documentation] (http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html) + +> Note: This client is a work-in-progress, and may occasionally +> make backwards-incompatible changes. 
+ +Quickstart +---------- +If you are using Maven, add this to your pom.xml file + + +If you are using Gradle, add this to your dependencies + + +If you are using SBT, add this to your dependencies + + +Example Application +------------------- + + + +Authentication +-------------- + +See the [Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) section in the base directory's README. + +About Google Cloud BigQuery +-------------------------- + +[Google Cloud BigQuery][cloud-bigquery] is a fully managed, NoOps, low cost data analytics service. +Data can be streamed into BigQuery at millions of rows per second to enable real-time analysis. +With BigQuery you can easily deploy Petabyte-scale Databases. + +Be sure to activate the Google Cloud BigQuery API on the Developer's Console to use BigQuery from +your project. + +See the ``gcloud-java`` API [bigquery documentation][bigquery-api] to learn how to interact +with Google Cloud BigQuery using this Client Library. + +Getting Started +--------------- +#### Prerequisites +For this tutorial, you will need a +[Google Developers Console](https://console.developers.google.com/) project with the BigQuery API +enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to +use Google Cloud BigQuery. +[Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your +project set up. You will also need to set up the local development environment by [installing the +Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line: +`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. + +#### Installation and setup +You'll need to obtain the `gcloud-java-bigquery` library. See the [Quickstart](#quickstart) section +to add `gcloud-java-bigquery` as a dependency in your code. 
+ +#### Creating an authorized service object +To make authenticated requests to Google Cloud BigQuery, you must create a service object with +credentials. You can then make API calls by calling methods on the BigQuery service object. The +simplest way to authenticate is to use +[Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials). +These credentials are automatically inferred from your environment, so you only need the following +code to create your service object: + +```java +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; + +BigQuery bigquery = BigQueryOptions.defaultInstance().service(); +``` + +For other authentication options, see the +[Authentication](https://github.com/GoogleCloudPlatform/gcloud-java#authentication) page. + +#### Creating a dataset +With BigQuery you can create datasets. A dataset is a grouping mechanism that holds zero or more +tables. Add the following import at the top of your file: + +```java +import com.google.gcloud.bigquery.DatasetInfo; +``` +Then, to create the dataset, use the following code: + +```java +// Create a dataset +String datasetId = "my_dataset_id"; +bigquery.create(DatasetInfo.builder(datasetId).build()); +``` + +#### Creating a table +With BigQuery you can create different types of tables: normal tables with an associated schema, +external tables backed by data stored on [Google Cloud Storage][cloud-storage] and view tables that +are created from a BigQuery SQL query. In this code snippet we show how to create a normal table +with only one string field. 
Add the following imports at the top of your file: + +```java +import com.google.gcloud.bigquery.BaseTableInfo; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; +``` +Then add the following code to create the table: + +```java +TableId tableId = TableId.of(datasetId, "my_table_id"); +// Table field definition +Field stringField = Field.of("StringField", Field.Type.string()); +// Table schema definition +Schema schema = Schema.of(stringField); +// Create a table +TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema)); +``` + +#### Loading data into a table +BigQuery provides several ways to load data into a table: streaming rows or loading data from a +Google Cloud Storage file. In this code snippet we show how to stream rows into a table. +Add the following imports at the top of your file: + +```java +import com.google.gcloud.bigquery.InsertAllRequest; +import com.google.gcloud.bigquery.InsertAllResponse; + +import java.util.HashMap; +import java.util.Map; +``` +Then add the following code to insert data: + +```java +Map firstRow = new HashMap<>(); +Map secondRow = new HashMap<>(); +firstRow.put("StringField", "value1"); +secondRow.put("StringField", "value2"); +// Create an insert request +InsertAllRequest insertRequest = InsertAllRequest.builder(tableId) + .addRow(firstRow) + .addRow(secondRow) + .build(); +// Insert rows +InsertAllResponse insertResponse = bigquery.insertAll(insertRequest); +// Check if errors occurred +if (insertResponse.hasErrors()) { + System.out.println("Errors occurred while inserting rows"); +} +``` + +#### Querying data +BigQuery enables querying data by running queries and waiting for the result. Queries can be run +directly or through a Query Job. In this code snippet we show how to run a query directly and wait +for the result. 
Add the following imports at the top of your file: + +```java +import com.google.gcloud.bigquery.FieldValue; +import com.google.gcloud.bigquery.QueryRequest; +import com.google.gcloud.bigquery.QueryResponse; + +import java.util.Iterator; +import java.util.List; +``` +Then add the following code to run the query and wait for the result: + +```java +// Create a query request +QueryRequest queryRequest = + QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id") + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); +// Request query to be executed and wait for results +QueryResponse queryResponse = bigquery.query(queryRequest); +while (!queryResponse.jobComplete()) { + Thread.sleep(1000L); + queryResponse = bigquery.getQueryResults(queryResponse.jobId()); +} +// Read rows +Iterator> rowIterator = queryResponse.result().iterateAll(); +System.out.println("Table rows:"); +while (rowIterator.hasNext()) { + System.out.println(rowIterator.next()); +} +``` +#### Complete source code + +Here we put together all the code shown above into one program. This program assumes that you are +running on Compute Engine or from your own desktop. To run this example on App Engine, simply move +the code from the main method to your application's servlet class and change the print statements to +display on your webpage. 
+ +```java +import com.google.gcloud.bigquery.BaseTableInfo; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryOptions; +import com.google.gcloud.bigquery.DatasetInfo; +import com.google.gcloud.bigquery.Field; +import com.google.gcloud.bigquery.FieldValue; +import com.google.gcloud.bigquery.InsertAllRequest; +import com.google.gcloud.bigquery.InsertAllResponse; +import com.google.gcloud.bigquery.QueryRequest; +import com.google.gcloud.bigquery.QueryResponse; +import com.google.gcloud.bigquery.Schema; +import com.google.gcloud.bigquery.TableId; +import com.google.gcloud.bigquery.TableInfo; + +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +public class GcloudBigQueryExample { + + public static void main(String[] args) throws InterruptedException { + + // Create a service instance + BigQuery bigquery = BigQueryOptions.defaultInstance().service(); + + // Create a dataset + String datasetId = "my_dataset_id"; + bigquery.create(DatasetInfo.builder(datasetId).build()); + + TableId tableId = TableId.of(datasetId, "my_table_id"); + // Table field definition + Field stringField = Field.of("StringField", Field.Type.string()); + // Table schema definition + Schema schema = Schema.of(stringField); + // Create a table + TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema)); + + // Define rows to insert + Map firstRow = new HashMap<>(); + Map secondRow = new HashMap<>(); + firstRow.put("StringField", "value1"); + secondRow.put("StringField", "value2"); + // Create an insert request + InsertAllRequest insertRequest = InsertAllRequest.builder(tableId) + .addRow(firstRow) + .addRow(secondRow) + .build(); + // Insert rows + InsertAllResponse insertResponse = bigquery.insertAll(insertRequest); + // Check if errors occurred + if (insertResponse.hasErrors()) { + System.out.println("Errors occurred while inserting rows"); + } + + // Create a query request + QueryRequest 
queryRequest = + QueryRequest.builder("SELECT * FROM my_dataset_id.my_table_id") + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + // Request query to be executed and wait for results + QueryResponse queryResponse = bigquery.query(queryRequest); + while (!queryResponse.jobComplete()) { + Thread.sleep(1000L); + queryResponse = bigquery.getQueryResults(queryResponse.jobId()); + } + // Read rows + Iterator> rowIterator = queryResponse.result().iterateAll(); + System.out.println("Table rows:"); + while (rowIterator.hasNext()) { + System.out.println(rowIterator.next()); + } + } +} +``` + +Troubleshooting +--------------- + +To get help, follow the `gcloud-java` links in the `gcloud-*`[shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting). + +Java Versions +------------- + +Java 7 or above is required for using this client. + +Testing +------- + +This library has tools to help make tests for code using Cloud BigQuery. + +See [TESTING] to read more about testing. + +Versioning +---------- + +This library follows [Semantic Versioning] (http://semver.org/). + +It is currently in major version zero (``0.y.z``), which means that anything +may change at any time and the public API should not be considered +stable. + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See [CONTRIBUTING] for more information on how to get started. + +License +------- + +Apache 2.0 - See [LICENSE] for more information. 
+ + +[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/CONTRIBUTING.md +[LICENSE]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/LICENSE +[TESTING]: https://github.com/GoogleCloudPlatform/gcloud-java/blob/master/TESTING.md#testing-code-that-uses-bigquery +[cloud-platform]: https://cloud.google.com/ + +[cloud-bigquery]: https://cloud.google.com/bigquery/ +[cloud-storage]: https://cloud.google.com/storage/ +[bigquery-api]: http://googlecloudplatform.github.io/gcloud-java/apidocs/index.html?com/google/gcloud/bigquery/package-summary.html \ No newline at end of file diff --git a/gcloud-java-bigquery/pom.xml b/gcloud-java-bigquery/pom.xml new file mode 100644 index 000000000000..2f22fad6fea8 --- /dev/null +++ b/gcloud-java-bigquery/pom.xml @@ -0,0 +1,56 @@ + + + 4.0.0 + com.google.gcloud + gcloud-java-bigquery + jar + GCloud Java bigquery + + Java idiomatic client for Google Cloud BigQuery. + + + com.google.gcloud + gcloud-java-pom + 0.1.1-SNAPSHOT + + + gcloud-java-bigquery + + + + ${project.groupId} + gcloud-java-core + ${project.version} + + + ${project.groupId} + gcloud-java-storage + ${project.version} + test + + + com.google.apis + google-api-services-bigquery + v2-rev254-1.21.0 + compile + + + com.google.guava + guava-jdk5 + + + + + junit + junit + 4.12 + test + + + org.easymock + easymock + 3.3 + test + + + diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java new file mode 100644 index 000000000000..2a042c108e00 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java @@ -0,0 +1,438 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Dataset.Access; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Access Control for a BigQuery Dataset. BigQuery uses ACLs to manage permissions on datasets. ACLs + * are not directly supported on tables. A table inherits its ACL from the dataset that contains it. + * Project roles affect your ability to run jobs or manage the project, while dataset roles affect + * how you can access or modify the data inside of a project. + * + * @see Access Control + */ +public final class Acl implements Serializable { + + private static final long serialVersionUID = 8357269726277191556L; + + private final Entity entity; + private final Role role; + + /** + * Dataset roles supported by BigQuery. + * + * @see Dataset Roles + */ + public enum Role { + /** + * Can read, query, copy or export tables in the dataset. + */ + READER, + + /** + * Same as {@link #READER} plus can edit or append data in the dataset. + */ + WRITER, + + /** + * Same as {@link #WRITER} plus can update and delete the dataset. + */ + OWNER + } + + /** + * Base class for BigQuery entities that can be grant access to the dataset. + */ + public abstract static class Entity implements Serializable { + + private static final long serialVersionUID = 8111776788607959944L; + + private final Type type; + + /** + * Types of BigQuery entities. 
+ */ + public enum Type { + DOMAIN, GROUP, USER, VIEW + } + + Entity(Type type) { + this.type = type; + } + + public Type type() { + return type; + } + + abstract Access toPb(); + + static Entity fromPb(Access access) { + if (access.getDomain() != null) { + return new Domain(access.getDomain()); + } + if (access.getGroupByEmail() != null) { + return new Group(access.getGroupByEmail()); + } + if (access.getSpecialGroup() != null) { + return new Group(access.getSpecialGroup()); + } + if (access.getUserByEmail() != null) { + return new User(access.getUserByEmail()); + } + if (access.getView() != null) { + return new View(TableId.fromPb(access.getView())); + } + // Unreachable + throw new BigQueryException(BigQueryException.UNKNOWN_CODE, + "Unrecognized access configuration", false); + } + } + + /** + * Class for a BigQuery Domain entity. Objects of this class represent a domain to grant access + * to. Any users signed in with the domain specified will be granted the specified access. + */ + public static final class Domain extends Entity { + + private static final long serialVersionUID = -3033025857280447253L; + + private final String domain; + + /** + * Creates a Domain entity given the domain name. + */ + public Domain(String domain) { + super(Type.DOMAIN); + this.domain = domain; + } + + /** + * Returns the domain name. + */ + public String domain() { + return domain; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Domain domainEntity = (Domain) obj; + return Objects.equals(type(), domainEntity.type()) + && Objects.equals(domain, domainEntity.domain()); + } + + @Override + public int hashCode() { + return Objects.hash(type(), domain); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access().setDomain(domain); + } + } + + /** + * Class for a BigQuery Group entity. 
Objects of this class represent a group to granted access + * to. A Group entity can be created given the group's email or can be a special group: + * {@link #ofProjectOwners()}, {@link #ofProjectReaders()}, {@link #ofProjectWriters()} or + * {@link #ofAllAuthenticatedUsers()}. + */ + public static final class Group extends Entity { + + private static final String PROJECT_OWNERS = "projectOwners"; + private static final String PROJECT_READERS = "projectReaders"; + private static final String PROJECT_WRITERS = "projectWriters"; + private static final String ALL_AUTHENTICATED_USERS = "allAuthenticatedUsers"; + private static final long serialVersionUID = 5146829352398103029L; + + private final String identifier; + + /** + * Creates a Group entity given its identifier. Identifier can be either a + * + * special group identifier or a group email. + */ + public Group(String identifier) { + super(Type.GROUP); + this.identifier = identifier; + } + + /** + * Returns group's identifier, can be either a + * + * special group identifier or a group email. 
+ */ + public String identifier() { + return identifier; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Group group = (Group) obj; + return Objects.equals(type(), group.type()) && Objects.equals(identifier, group.identifier); + } + + @Override + public int hashCode() { + return Objects.hash(type(), identifier); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + switch (identifier) { + case PROJECT_OWNERS: + return new Access().setSpecialGroup(PROJECT_OWNERS); + case PROJECT_READERS: + return new Access().setSpecialGroup(PROJECT_READERS); + case PROJECT_WRITERS: + return new Access().setSpecialGroup(PROJECT_WRITERS); + case ALL_AUTHENTICATED_USERS: + return new Access().setSpecialGroup(ALL_AUTHENTICATED_USERS); + default: + return new Access().setGroupByEmail(identifier); + } + } + + /** + * Returns a Group entity representing all project's owners. + */ + public static Group ofProjectOwners() { + return new Group(PROJECT_OWNERS); + } + + /** + * Returns a Group entity representing all project's readers. + */ + public static Group ofProjectReaders() { + return new Group(PROJECT_READERS); + } + + /** + * Returns a Group entity representing all project's writers. + */ + public static Group ofProjectWriters() { + return new Group(PROJECT_WRITERS); + } + + /** + * Returns a Group entity representing all BigQuery authenticated users. + */ + public static Group ofAllAuthenticatedUsers() { + return new Group(ALL_AUTHENTICATED_USERS); + } + } + + /** + * Class for a BigQuery User entity. Objects of this class represent a user to grant access to + * given the email address. + */ + public static final class User extends Entity { + + private static final long serialVersionUID = -4942821351073996141L; + + private final String email; + + /** + * Creates a User entity given the user's email. 
+ */ + public User(String email) { + super(Type.USER); + this.email = email; + } + + /** + * Returns user's email. + */ + public String email() { + return email; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + User user = (User) obj; + return Objects.equals(type(), user.type()) && Objects.equals(email, user.email); + } + + @Override + public int hashCode() { + return Objects.hash(type(), email); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access().setUserByEmail(email); + } + } + + /** + * Class for a BigQuery View entity. Objects of this class represent a view from a different + * dataset to grant access to. Queries executed against that view will have read access to tables + * in this dataset. The role field is not required when this field is set. If that view is updated + * by any user, access to the view needs to be granted again via an update operation. + */ + public static final class View extends Entity { + + private final TableId id; + + /** + * Creates a View entity given the view's id. + */ + public View(TableId id) { + super(Type.VIEW); + this.id = id; + } + + /** + * Returns table's identity. + */ + public TableId id() { + return id; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + View view = (View) obj; + return Objects.equals(type(), view.type()) && Objects.equals(id, view.id); + } + + @Override + public int hashCode() { + return Objects.hash(type(), id); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access().setView(id.toPb()); + } + } + + /** + * Build an ACL for an {@code entity} and a {@code role}. 
+ */ + public Acl(Entity entity, Role role) { + this.entity = checkNotNull(entity); + this.role = role; + } + + /** + * Build an ACL for a view entity. + */ + public Acl(View view) { + this.entity = checkNotNull(view); + this.role = null; + } + + /** + * Returns the entity for this ACL. + */ + public Entity entity() { + return entity; + } + + /** + * Returns the role specified by this ACL. + */ + public Role role() { + return role; + } + + @Override + public int hashCode() { + return Objects.hash(entity, role); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + final Acl other = (Acl) obj; + return Objects.equals(this.entity, other.entity) + && Objects.equals(this.role, other.role); + } + + Access toPb() { + Access accessPb = entity.toPb(); + if (role != null) { + accessPb.setRole(role.name()); + } + return accessPb; + } + + static Acl fromPb(Access access) { + return new Acl(Entity.fromPb(access), + access.getRole() != null ? Role.valueOf(access.getRole()) : null); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BaseTableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BaseTableInfo.java new file mode 100644 index 000000000000..16d2af6f4580 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BaseTableInfo.java @@ -0,0 +1,431 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.base.MoreObjects.ToStringHelper; + +import java.io.Serializable; +import java.math.BigInteger; +import java.util.Objects; + +/** + * Base class for Google BigQuery table information. Use {@link TableInfo} for a simple BigQuery + * Table. Use {@link ViewInfo} for a BigQuery View Table. Use {@link ExternalTableInfo} for a + * BigQuery Table backed by external data. + * + * @see Managing Tables + */ +public abstract class BaseTableInfo implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public BaseTableInfo apply(Table pb) { + return BaseTableInfo.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public Table apply(BaseTableInfo tableInfo) { + return tableInfo.toPb(); + } + }; + + private static final long serialVersionUID = -7679032506430816205L; + + /** + * The table type. + */ + public enum Type { + /** + * A normal BigQuery table. + */ + TABLE, + + /** + * A virtual table defined by a SQL query. + * + * @see Views + */ + VIEW, + + /** + * A BigQuery table backed by external data. 
+ * + * @see Federated Data + * Sources + */ + EXTERNAL + } + + private final String etag; + private final String id; + private final String selfLink; + private final TableId tableId; + private final Type type; + private final Schema schema; + private final String friendlyName; + private final String description; + private final Long numBytes; + private final Long numRows; + private final Long creationTime; + private final Long expirationTime; + private final Long lastModifiedTime; + + public abstract static class Builder> { + + private String etag; + private String id; + private String selfLink; + private TableId tableId; + private Type type; + private Schema schema; + private String friendlyName; + private String description; + private Long numBytes; + private Long numRows; + private Long creationTime; + private Long expirationTime; + private Long lastModifiedTime; + + protected Builder() {} + + protected Builder(BaseTableInfo tableInfo) { + this.etag = tableInfo.etag; + this.id = tableInfo.id; + this.selfLink = tableInfo.selfLink; + this.tableId = tableInfo.tableId; + this.type = tableInfo.type; + this.schema = tableInfo.schema; + this.friendlyName = tableInfo.friendlyName; + this.description = tableInfo.description; + this.numBytes = tableInfo.numBytes; + this.numRows = tableInfo.numRows; + this.creationTime = tableInfo.creationTime; + this.expirationTime = tableInfo.expirationTime; + this.lastModifiedTime = tableInfo.lastModifiedTime; + } + + protected Builder(Table tablePb) { + this.type = Type.valueOf(tablePb.getType()); + this.tableId = TableId.fromPb(tablePb.getTableReference()); + if (tablePb.getSchema() != null) { + this.schema(Schema.fromPb(tablePb.getSchema())); + } + if (tablePb.getLastModifiedTime() != null) { + this.lastModifiedTime(tablePb.getLastModifiedTime().longValue()); + } + if (tablePb.getNumRows() != null) { + this.numRows(tablePb.getNumRows().longValue()); + } + this.description = tablePb.getDescription(); + this.expirationTime = 
tablePb.getExpirationTime(); + this.friendlyName = tablePb.getFriendlyName(); + this.creationTime = tablePb.getCreationTime(); + this.etag = tablePb.getEtag(); + this.id = tablePb.getId(); + this.numBytes = tablePb.getNumBytes(); + this.selfLink = tablePb.getSelfLink(); + } + + @SuppressWarnings("unchecked") + protected B self() { + return (B) this; + } + + B creationTime(Long creationTime) { + this.creationTime = creationTime; + return self(); + } + + /** + * Sets a user-friendly description for the table. + */ + public B description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return self(); + } + + B etag(String etag) { + this.etag = etag; + return self(); + } + + /** + * Sets the time when this table expires, in milliseconds since the epoch. If not present, the + * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. + */ + public B expirationTime(Long expirationTime) { + this.expirationTime = firstNonNull(expirationTime, Data.nullOf(Long.class)); + return self(); + } + + /** + * Sets a user-friendly name for the table. + */ + public B friendlyName(String friendlyName) { + this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); + return self(); + } + + B id(String id) { + this.id = id; + return self(); + } + + B lastModifiedTime(Long lastModifiedTime) { + this.lastModifiedTime = lastModifiedTime; + return self(); + } + + B numBytes(Long numBytes) { + this.numBytes = numBytes; + return self(); + } + + B numRows(Long numRows) { + this.numRows = numRows; + return self(); + } + + B selfLink(String selfLink) { + this.selfLink = selfLink; + return self(); + } + + /** + * Sets the table identity. + */ + public B tableId(TableId tableId) { + this.tableId = checkNotNull(tableId); + return self(); + } + + B type(Type type) { + this.type = type; + return self(); + } + + /** + * Sets the table schema. 
+ */ + public B schema(Schema schema) { + this.schema = checkNotNull(schema); + return self(); + } + + /** + * Creates an object. + */ + public abstract T build(); + } + + protected BaseTableInfo(Builder builder) { + this.tableId = checkNotNull(builder.tableId); + this.etag = builder.etag; + this.id = builder.id; + this.selfLink = builder.selfLink; + this.friendlyName = builder.friendlyName; + this.description = builder.description; + this.type = builder.type; + this.schema = builder.schema; + this.numBytes = builder.numBytes; + this.numRows = builder.numRows; + this.creationTime = builder.creationTime; + this.expirationTime = builder.expirationTime; + this.lastModifiedTime = builder.lastModifiedTime; + } + + /** + * Returns the hash of the table resource. + */ + public String etag() { + return etag; + } + + /** + * Returns an opaque id for the table. + */ + public String id() { + return id; + } + + /** + * Returns the table's type. If this table is simple table the method returns {@link Type#TABLE}. + * If this table is an external table this method returns {@link Type#EXTERNAL}. If this table is + * a view table this method returns {@link Type#VIEW}. + */ + public Type type() { + return type; + } + + /** + * Returns the table's schema. + */ + public Schema schema() { + return schema; + } + + /** + * Returns an URL that can be used to access the resource again. The returned URL can be used for + * get or update requests. + */ + public String selfLink() { + return selfLink; + } + + /** + * Returns the table identity. + */ + public TableId tableId() { + return tableId; + } + + /** + * Returns a user-friendly name for the table. + */ + public String friendlyName() { + return Data.isNull(friendlyName) ? null : friendlyName; + } + + /** + * Returns a user-friendly description for the table. + */ + public String description() { + return Data.isNull(description) ? 
null : description; + } + + /** + * Returns the size of this table in bytes, excluding any data in the streaming buffer. + */ + public Long numBytes() { + return numBytes; + } + + /** + * Returns the number of rows in this table, excluding any data in the streaming buffer. + */ + public Long numRows() { + return numRows; + } + + /** + * Returns the time when this table was created, in milliseconds since the epoch. + */ + public Long creationTime() { + return creationTime; + } + + /** + * Returns the time when this table expires, in milliseconds since the epoch. If not present, the + * table will persist indefinitely. Expired tables will be deleted and their storage reclaimed. + */ + public Long expirationTime() { + return Data.isNull(expirationTime) ? null : expirationTime; + } + + /** + * Returns the time when this table was last modified, in milliseconds since the epoch. + */ + public Long lastModifiedTime() { + return lastModifiedTime; + } + + /** + * Returns a builder for the object. 
+ */ + public abstract Builder toBuilder(); + + ToStringHelper toStringHelper() { + return MoreObjects.toStringHelper(this) + .add("tableId", tableId) + .add("type", type) + .add("schema", schema) + .add("etag", etag) + .add("id", id) + .add("selfLink", selfLink) + .add("friendlyName", friendlyName) + .add("description", description) + .add("numBytes", numBytes) + .add("numRows", numRows) + .add("expirationTime", expirationTime) + .add("creationTime", creationTime) + .add("lastModifiedTime", lastModifiedTime); + } + + @Override + public String toString() { + return toStringHelper().toString(); + } + + @Override + public int hashCode() { + return Objects.hash(tableId); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof BaseTableInfo && Objects.equals(toPb(), ((BaseTableInfo) obj).toPb()); + } + + Table toPb() { + Table tablePb = new Table(); + tablePb.setTableReference(tableId.toPb()); + if (lastModifiedTime != null) { + tablePb.setLastModifiedTime(BigInteger.valueOf(lastModifiedTime)); + } + if (numRows != null) { + tablePb.setNumRows(BigInteger.valueOf(numRows)); + } + if (schema != null) { + tablePb.setSchema(schema.toPb()); + } + tablePb.setType(type.name()); + tablePb.setCreationTime(creationTime); + tablePb.setDescription(description); + tablePb.setEtag(etag); + tablePb.setExpirationTime(expirationTime); + tablePb.setFriendlyName(friendlyName); + tablePb.setId(id); + tablePb.setNumBytes(numBytes); + tablePb.setSelfLink(selfLink); + return tablePb; + } + + @SuppressWarnings("unchecked") + static T fromPb(Table tablePb) { + switch (Type.valueOf(tablePb.getType())) { + case TABLE: + return (T) TableInfo.fromPb(tablePb); + case VIEW: + return (T) ViewInfo.fromPb(tablePb); + case EXTERNAL: + return (T) ExternalTableInfo.fromPb(tablePb); + default: + // never reached + throw new IllegalArgumentException("Format " + tablePb.getType() + " is not supported"); + } + } +} diff --git 
a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java new file mode 100644 index 000000000000..70c225942829 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -0,0 +1,665 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkArgument; + +import com.google.common.base.Function; +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import com.google.gcloud.Page; +import com.google.gcloud.Service; +import com.google.gcloud.spi.BigQueryRpc; + +import java.util.List; +import java.util.Set; + +/** + * An interface for Google Cloud BigQuery. + * + * @see Google Cloud BigQuery + */ +public interface BigQuery extends Service { + + /** + * Fields of a BigQuery Dataset resource. 
+ * + * @see Dataset + * Resource + */ + enum DatasetField { + ACCESS("access"), + CREATION_TIME("creationTime"), + DATASET_REFERENCE("datasetReference"), + DEFAULT_TABLE_EXPIRATION_MS("defaultTableExpirationMs"), + DESCRIPTION("description"), + ETAG("etag"), + FRIENDLY_NAME("friendlyName"), + ID("id"), + LAST_MODIFIED_TIME("lastModifiedTime"), + LOCATION("location"), + SELF_LINK("selfLink"); + + private final String selector; + + DatasetField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(DatasetField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 1); + fieldStrings.add(DATASET_REFERENCE.selector()); + for (DatasetField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Fields of a BigQuery Table resource. + * + * @see Table + * Resource + */ + enum TableField { + CREATION_TIME("creationTime"), + DESCRIPTION("description"), + ETAG("etag"), + EXPIRATION_TIME("expirationTime"), + EXTERNAL_DATA_CONFIGURATION("externalDataConfiguration"), + FRIENDLY_NAME("friendlyName"), + ID("id"), + LAST_MODIFIED_TIME("lastModifiedTime"), + LOCATION("location"), + NUM_BYTES("numBytes"), + NUM_ROWS("numRows"), + SCHEMA("schema"), + SELF_LINK("selfLink"), + STREAMING_BUFFER("streamingBuffer"), + TABLE_REFERENCE("tableReference"), + TYPE("type"), + VIEW("view"); + + private final String selector; + + TableField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(TableField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2); + fieldStrings.add(TABLE_REFERENCE.selector()); + fieldStrings.add(TYPE.selector()); + for (TableField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Fields of a BigQuery Job resource. 
+ * + * @see Job Resource + * + */ + enum JobField { + CONFIGURATION("configuration"), + ETAG("etag"), + ID("id"), + JOB_REFERENCE("jobReference"), + SELF_LINK("selfLink"), + STATISTICS("statistics"), + STATUS("status"), + USER_EMAIL("user_email"); + + private final String selector; + + JobField(String selector) { + this.selector = selector; + } + + public String selector() { + return selector; + } + + static String selector(JobField... fields) { + Set fieldStrings = Sets.newHashSetWithExpectedSize(fields.length + 2); + fieldStrings.add(JOB_REFERENCE.selector()); + fieldStrings.add(CONFIGURATION.selector()); + for (JobField field : fields) { + fieldStrings.add(field.selector()); + } + return Joiner.on(',').join(fieldStrings); + } + } + + /** + * Class for specifying dataset list options. + */ + class DatasetListOption extends Option { + + private static final long serialVersionUID = 8660294969063340498L; + + private DatasetListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of datasets to be returned. + */ + public static DatasetListOption maxResults(long maxResults) { + return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + } + + /** + * Returns an option to specify the page token from which to start listing datasets. + */ + public static DatasetListOption startPageToken(String pageToken) { + return new DatasetListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an options to list all datasets, even hidden ones. + */ + public static DatasetListOption all() { + return new DatasetListOption(BigQueryRpc.Option.ALL_DATASETS, true); + } + } + + /** + * Class for specifying dataset get, create and update options. 
+ */ + class DatasetOption extends Option { + + private static final long serialVersionUID = 1674133909259913250L; + + private DatasetOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the dataset's fields to be returned by the RPC call. If this + * option is not provided all dataset's fields are returned. {@code DatasetOption.fields} can + * be used to specify only the fields of interest. {@link DatasetInfo#datasetId()} is always + * returned, even if not specified. + */ + public static DatasetOption fields(DatasetField... fields) { + return new DatasetOption(BigQueryRpc.Option.FIELDS, DatasetField.selector(fields)); + } + } + + /** + * Class for specifying dataset delete options. + */ + class DatasetDeleteOption extends Option { + + private static final long serialVersionUID = -7166083569900951337L; + + private DatasetDeleteOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to delete a dataset even if non-empty. If not provided, attempting to + * delete a non-empty dataset will result in a {@link BigQueryException} being thrown. + */ + public static DatasetDeleteOption deleteContents() { + return new DatasetDeleteOption(BigQueryRpc.Option.DELETE_CONTENTS, true); + } + } + + /** + * Class for specifying table list options. + */ + class TableListOption extends Option { + + private static final long serialVersionUID = 8660294969063340498L; + + private TableListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of tables to be returned. + */ + public static TableListOption maxResults(long maxResults) { + checkArgument(maxResults >= 0); + return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + } + + /** + * Returns an option to specify the page token from which to start listing tables. 
+ */ + public static TableListOption startPageToken(String pageToken) { + return new TableListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + } + + /** + * Class for specifying table get, create and update options. + */ + class TableOption extends Option { + + private static final long serialVersionUID = -1723870134095936772L; + + private TableOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the table's fields to be returned by the RPC call. If this + * option is not provided all table's fields are returned. {@code TableOption.fields} can be + * used to specify only the fields of interest. {@link BaseTableInfo#tableId()} and + * {@link BaseTableInfo#type()} are always returned, even if not specified. + */ + public static TableOption fields(TableField... fields) { + return new TableOption(BigQueryRpc.Option.FIELDS, TableField.selector(fields)); + } + } + + /** + * Class for specifying table data list options. + */ + class TableDataListOption extends Option { + + private static final long serialVersionUID = 8488823381738864434L; + + private TableDataListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of rows to be returned. + */ + public static TableDataListOption maxResults(long maxResults) { + checkArgument(maxResults >= 0); + return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + } + + /** + * Returns an option to specify the page token from which to start listing table data. + */ + public static TableDataListOption startPageToken(String pageToken) { + return new TableDataListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an option that sets the zero-based index of the row from which to start listing table + * data. 
+ */ + public static TableDataListOption startIndex(long index) { + checkArgument(index >= 0); + return new TableDataListOption(BigQueryRpc.Option.START_INDEX, index); + } + } + + /** + * Class for specifying job list options. + */ + class JobListOption extends Option { + + private static final long serialVersionUID = -8207122131226481423L; + + private JobListOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to list all jobs, even the ones issued by other users. + */ + public static JobListOption allUsers() { + return new JobListOption(BigQueryRpc.Option.ALL_USERS, true); + } + + /** + * Returns an option to list only jobs that match the provided state filters. + */ + public static JobListOption stateFilter(JobStatus.State... stateFilters) { + List stringFilters = Lists.transform(ImmutableList.copyOf(stateFilters), + new Function() { + @Override + public String apply(JobStatus.State state) { + return state.name().toLowerCase(); + } + }); + return new JobListOption(BigQueryRpc.Option.STATE_FILTER, stringFilters); + } + + /** + * Returns an option to specify the maximum number of jobs to be returned. + */ + public static JobListOption maxResults(long maxResults) { + checkArgument(maxResults >= 0); + return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + } + + /** + * Returns an option to specify the page token from which to start listing jobs. + */ + public static JobListOption startPageToken(String pageToken) { + return new JobListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an option to specify the job's fields to be returned by the RPC call. If this option + * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to + * specify only the fields of interest. {@link JobInfo#jobId()}, {@link JobStatus#state()}, + * {@link JobStatus#error()} as well as type-specific configuration (e.g. 
+ * {@link QueryJobInfo#query()} for Query Jobs) are always returned, even if not specified. + * {@link JobField#SELF_LINK} and {@link JobField#ETAG} can not be selected when listing jobs. + */ + public static JobListOption fields(JobField... fields) { + String selector = JobField.selector(fields); + StringBuilder builder = new StringBuilder(); + builder.append("etag,jobs(").append(selector).append(",state,errorResult),nextPageToken"); + return new JobListOption(BigQueryRpc.Option.FIELDS, builder.toString()); + } + } + + /** + * Class for specifying table get and create options. + */ + class JobOption extends Option { + + private static final long serialVersionUID = -3111736712316353665L; + + private JobOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the job's fields to be returned by the RPC call. If this option + * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to + * specify only the fields of interest. {@link JobInfo#jobId()} as well as type-specific + * configuration (e.g. {@link QueryJobInfo#query()} for Query Jobs) are always returned, even if + * not specified. + */ + public static JobOption fields(JobField... fields) { + return new JobOption(BigQueryRpc.Option.FIELDS, JobField.selector(fields)); + } + } + + /** + * Class for specifying query results options. + */ + class QueryResultsOption extends Option { + + private static final long serialVersionUID = 3788898503226985525L; + + private QueryResultsOption(BigQueryRpc.Option option, Object value) { + super(option, value); + } + + /** + * Returns an option to specify the maximum number of rows to be returned. + */ + public static QueryResultsOption maxResults(long maxResults) { + checkArgument(maxResults >= 0); + return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, maxResults); + } + + /** + * Returns an option to specify the page token from which to start getting query results. 
+ */ + public static QueryResultsOption startPageToken(String pageToken) { + return new QueryResultsOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken); + } + + /** + * Returns an option that sets the zero-based index of the row from which to start getting query + * results. + */ + public static QueryResultsOption startIndex(long startIndex) { + checkArgument(startIndex >= 0); + return new QueryResultsOption(BigQueryRpc.Option.START_INDEX, startIndex); + } + + /** + * Returns an option that sets how long to wait for the query to complete, in milliseconds, + * before returning. Default is 10 seconds. If the timeout passes before the job completes, + * {@link QueryResponse#jobComplete()} will be {@code false}. + */ + public static QueryResultsOption maxWaitTime(long maxWaitTime) { + checkArgument(maxWaitTime >= 0); + return new QueryResultsOption(BigQueryRpc.Option.TIMEOUT, maxWaitTime); + } + } + + /** + * Creates a new dataset. + * + * @throws BigQueryException upon failure + */ + DatasetInfo create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + + /** + * Creates a new table. + * + * @throws BigQueryException upon failure + */ + T create(T table, TableOption... options) throws BigQueryException; + + /** + * Creates a new job. + * + * @throws BigQueryException upon failure + */ + T create(T job, JobOption... options) throws BigQueryException; + + /** + * Returns the requested dataset or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + DatasetInfo getDataset(String datasetId, DatasetOption... options) throws BigQueryException; + + /** + * Returns the requested dataset or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + DatasetInfo getDataset(DatasetId datasetId, DatasetOption... options) throws BigQueryException; + + /** + * Lists the project's datasets. 
This method returns partial information on each dataset + * ({@link DatasetInfo#datasetId()}, {@link DatasetInfo#friendlyName()} and + * {@link DatasetInfo#id()}). To get complete information use either + * {@link #getDataset(String, DatasetOption...)} or + * {@link #getDataset(DatasetId, DatasetOption...)}. + * + * @throws BigQueryException upon failure + */ + Page listDatasets(DatasetListOption... options) throws BigQueryException; + + /** + * Deletes the requested dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found. + * @throws BigQueryException upon failure + */ + boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException; + + /** + * Deletes the requested dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found. + * @throws BigQueryException upon failure + */ + boolean delete(DatasetId datasetId, DatasetDeleteOption... options) throws BigQueryException; + + /** + * Deletes the requested table. + * + * @return {@code true} if table was deleted, {@code false} if it was not found. + * @throws BigQueryException upon failure + */ + boolean delete(String datasetId, String tableId) throws BigQueryException; + + /** + * Deletes the requested table. + * + * @return {@code true} if table was deleted, {@code false} if it was not found. + * @throws BigQueryException upon failure + */ + boolean delete(TableId tableId) throws BigQueryException; + + /** + * Updates dataset information. + * + * @throws BigQueryException upon failure + */ + DatasetInfo update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + + /** + * Updates table information. + * + * @throws BigQueryException upon failure + */ + T update(T table, TableOption... options) throws BigQueryException; + + /** + * Returns the requested table or {@code null} if not found. 
+ * + * @throws BigQueryException upon failure + */ + T getTable(String datasetId, String tableId, TableOption... options) + throws BigQueryException; + + /** + * Returns the requested table or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + T getTable(TableId tableId, TableOption... options) + throws BigQueryException; + + /** + * Lists the tables in the dataset. This method returns partial information on each table + * ({@link BaseTableInfo#tableId()}, {@link BaseTableInfo#friendlyName()}, + * {@link BaseTableInfo#id()} and {@link BaseTableInfo#type()}). To get complete information use + * either {@link #getTable(TableId, TableOption...)} or + * {@link #getTable(String, String, TableOption...)}. + * + * @throws BigQueryException upon failure + */ + Page listTables(String datasetId, TableListOption... options) + throws BigQueryException; + + /** + * Lists the tables in the dataset. This method returns partial information on each table + * ({@link BaseTableInfo#tableId()}, {@link BaseTableInfo#friendlyName()}, + * {@link BaseTableInfo#id()} and {@link BaseTableInfo#type()}). To get complete information use + * either {@link #getTable(TableId, TableOption...)} or + * {@link #getTable(String, String, TableOption...)}. + * + * @throws BigQueryException upon failure + */ + Page listTables(DatasetId datasetId, TableListOption... options) + throws BigQueryException; + + /** + * Sends an insert all request. + * + * @throws BigQueryException upon failure + */ + InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException; + + /** + * Lists the table's rows. + * + * @throws BigQueryException upon failure + */ + Page> listTableData(String datasetId, String tableId, + TableDataListOption... options) throws BigQueryException; + + /** + * Lists the table's rows. + * + * @throws BigQueryException upon failure + */ + Page> listTableData(TableId tableId, TableDataListOption... 
options) + throws BigQueryException; + + /** + * Returns the requested job or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + T getJob(String jobId, JobOption... options) throws BigQueryException; + + /** + * Returns the requested job or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + T getJob(JobId jobId, JobOption... options) throws BigQueryException; + + /** + * Lists the jobs. + * + * @throws BigQueryException upon failure + */ + Page listJobs(JobListOption... options) throws BigQueryException; + + /** + * Sends a job cancel request. This call will return immediately. The job status can then be + * checked using either {@link #getJob(JobId, JobOption...)} or + * {@link #getJob(String, JobOption...)}. + * + * @return {@code true} if cancel was requested successfully, {@code false} if the job was not + * found + * @throws BigQueryException upon failure + */ + boolean cancel(String jobId) throws BigQueryException; + + /** + * Sends a job cancel request. This call will return immediately. The job status can then be + * checked using either {@link #getJob(JobId, JobOption...)} or + * {@link #getJob(String, JobOption...)}. + * + * @return {@code true} if cancel was requested successfully, {@code false} if the job was not + * found + * @throws BigQueryException upon failure + */ + boolean cancel(JobId jobId) throws BigQueryException; + + /** + * Runs the query associated with the request. + * + * @throws BigQueryException upon failure + */ + QueryResponse query(QueryRequest request) throws BigQueryException; + + /** + * Returns results of the query associated with the provided job. + * + * @throws BigQueryException upon failure + */ + QueryResponse getQueryResults(JobId job, QueryResultsOption... 
options) throws BigQueryException; +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java new file mode 100644 index 000000000000..2d89bccf62ea --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryError.java @@ -0,0 +1,119 @@ +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google Cloud BigQuery Job Error. Objects of this class represent errors occurred during the + * execution of a BigQuery Job. + */ +public class BigQueryError implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public BigQueryError apply(ErrorProto pb) { + return BigQueryError.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public ErrorProto apply(BigQueryError error) { + return error.toPb(); + } + }; + private static final long serialVersionUID = -6566785320629096688L; + + private final String reason; + private final String location; + private final String debugInfo; + private final String message; + + BigQueryError(String reason, String location, String message, String debugInfo) { + this.reason = reason; + this.location = location; + this.debugInfo = debugInfo; + this.message = message; + } + + BigQueryError(String reason, String location, String message) { + this.reason = reason; + this.location = location; + this.message = message; + this.debugInfo = null; + } + + /** + * Returns short error code that summarizes the error. + * + * @see Troubleshooting + * Errors + */ + public String reason() { + return reason; + } + + /** + * Returns where the error occurred, if present. 
+ */ + public String location() { + return location; + } + + String debugInfo() { + return debugInfo; + } + + /** + * Returns a human-readable description of the error. + */ + public String message() { + return message; + } + + @Override + public int hashCode() { + return Objects.hash(reason, location, message); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("reason", reason) + .add("location", location) + .add("message", message) + .toString(); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof BigQueryError && Objects.equals(toPb(), ((BigQueryError) obj).toPb()); + } + + ErrorProto toPb() { + ErrorProto errorPb = new ErrorProto(); + if (reason != null) { + errorPb.setReason(reason); + } + if (location != null) { + errorPb.setLocation(location); + } + if (message != null) { + errorPb.setMessage(message); + } + if (debugInfo != null) { + errorPb.setDebugInfo(debugInfo); + } + return errorPb; + } + + static BigQueryError fromPb(ErrorProto errorPb) { + return new BigQueryError(errorPb.getReason(), errorPb.getLocation(), errorPb.getMessage(), + errorPb.getDebugInfo()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java new file mode 100644 index 000000000000..020917762fa3 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryException.java @@ -0,0 +1,54 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.gcloud.BaseServiceException; +import com.google.gcloud.RetryHelper.RetryHelperException; +import com.google.gcloud.RetryHelper.RetryInterruptedException; + +/** + * BigQuery service exception. + * + * @see Google Cloud + * BigQuery error codes + */ +public class BigQueryException extends BaseServiceException { + + private static final long serialVersionUID = -5504832700512784654L; + public static final int UNKNOWN_CODE = -1; + + public BigQueryException(int code, String message, boolean retryable) { + super(code, message, retryable); + } + + /** + * Translate RetryHelperException to the BigQueryException that caused the error. This method will + * always throw an exception. 
+ * + * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException} + * @throws RetryInterruptedException when {@code ex} is a {@code RetryInterruptedException} + */ + static BigQueryException translateAndThrow(RetryHelperException ex) { + if (ex.getCause() instanceof BigQueryException) { + throw (BigQueryException) ex.getCause(); + } + if (ex instanceof RetryInterruptedException) { + RetryInterruptedException.propagate(); + } + throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), false); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java new file mode 100644 index 000000000000..90e7bbccd483 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryFactory.java @@ -0,0 +1,25 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.gcloud.ServiceFactory; + +/** + * An interface for BigQuery factories. 
+ */ +public interface BigQueryFactory extends ServiceFactory { +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java new file mode 100644 index 000000000000..62685d8ecc46 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -0,0 +1,715 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.gcloud.RetryHelper.runWithRetries; + +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.gcloud.BaseService; +import com.google.gcloud.ExceptionHandler; +import com.google.gcloud.ExceptionHandler.Interceptor; +import com.google.gcloud.Page; +import com.google.gcloud.PageImpl; +import com.google.gcloud.RetryHelper; +import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; +import com.google.gcloud.spi.BigQueryRpc; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; + +final class BigQueryImpl extends BaseService implements BigQuery { + + private static final Interceptor EXCEPTION_HANDLER_INTERCEPTOR = new Interceptor() { + + private static final long serialVersionUID = -7478333733015750774L; + + @Override + public RetryResult afterEval(Exception exception, RetryResult retryResult) { + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + + @Override + public RetryResult beforeEval(Exception exception) { + if (exception instanceof BigQueryException) { + boolean retriable = ((BigQueryException) exception).retryable(); + return retriable ? 
Interceptor.RetryResult.RETRY : Interceptor.RetryResult.NO_RETRY; + } + return Interceptor.RetryResult.CONTINUE_EVALUATION; + } + }; + static final ExceptionHandler EXCEPTION_HANDLER = ExceptionHandler.builder() + .abortOn(RuntimeException.class).interceptor(EXCEPTION_HANDLER_INTERCEPTOR).build(); + + private abstract static class BasePageFetcher implements PageImpl.NextPageFetcher { + + private static final long serialVersionUID = -338124488600215401L; + + protected final Map requestOptions; + protected final BigQueryOptions serviceOptions; + + BasePageFetcher(BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + this.serviceOptions = serviceOptions; + ImmutableMap.Builder builder = ImmutableMap.builder(); + if (cursor != null) { + builder.put(BigQueryRpc.Option.PAGE_TOKEN, cursor); + } + for (Map.Entry option : optionMap.entrySet()) { + if (option.getKey() != BigQueryRpc.Option.PAGE_TOKEN) { + builder.put(option.getKey(), option.getValue()); + } + } + this.requestOptions = builder.build(); + } + } + + private static class DatasetPageFetcher extends BasePageFetcher { + + private static final long serialVersionUID = 3030824397616608646L; + + DatasetPageFetcher(BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + super(serviceOptions, cursor, optionMap); + } + + @Override + public Page nextPage() { + return listDatasets(serviceOptions, requestOptions); + } + } + + private static class TablePageFetcher extends BasePageFetcher { + + private static final long serialVersionUID = 5908129355985236115L; + private final String dataset; + + TablePageFetcher(String dataset, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + super(serviceOptions, cursor, optionMap); + this.dataset = dataset; + } + + @Override + public Page nextPage() { + return listTables(dataset, serviceOptions, requestOptions); + } + } + + private static class JobPageFetcher extends BasePageFetcher { + + private static final long serialVersionUID = 
-4984845360519279880L; + + JobPageFetcher(BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + super(serviceOptions, cursor, optionMap); + } + + @Override + public Page nextPage() { + return listJobs(serviceOptions, requestOptions); + } + } + + private static class TableDataPageFetcher extends BasePageFetcher> { + + private static final long serialVersionUID = 1281938239570262432L; + private final TableId table; + + TableDataPageFetcher(TableId table, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + super(serviceOptions, cursor, optionMap); + this.table = table; + } + + @Override + public Page> nextPage() { + return listTableData(table, serviceOptions, requestOptions); + } + } + + private static class QueryResultsPageFetcherImpl extends BasePageFetcher> + implements QueryResult.QueryResultsPageFetcher { + + private static final long serialVersionUID = 6713948754731557486L; + private final JobId job; + + QueryResultsPageFetcherImpl(JobId job, BigQueryOptions serviceOptions, String cursor, + Map optionMap) { + super(serviceOptions, cursor, optionMap); + this.job = job; + } + + @Override + public QueryResult nextPage() { + return getQueryResults(job, serviceOptions, requestOptions).result(); + } + } + + private final BigQueryRpc bigQueryRpc; + + BigQueryImpl(BigQueryOptions options) { + super(options); + bigQueryRpc = options.rpc(); + } + + @Override + public DatasetInfo create(DatasetInfo dataset, DatasetOption... options) + throws BigQueryException { + final Dataset datasetPb = setProjectId(dataset).toPb(); + final Map optionsMap = optionMap(options); + try { + return DatasetInfo.fromPb(runWithRetries(new Callable() { + @Override + public Dataset call() { + return bigQueryRpc.create(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public T create(T table, TableOption... 
options) + throws BigQueryException { + final Table tablePb = setProjectId(table).toPb(); + final Map optionsMap = optionMap(options); + try { + return BaseTableInfo.fromPb(runWithRetries(new Callable() { + @Override + public Table call() { + return bigQueryRpc.create(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public T create(T job, JobOption... options) throws BigQueryException { + final Job jobPb = setProjectId(job).toPb(); + final Map optionsMap = optionMap(options); + try { + return JobInfo.fromPb(runWithRetries(new Callable() { + @Override + public Job call() { + return bigQueryRpc.create(jobPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public DatasetInfo getDataset(String datasetId, DatasetOption... options) + throws BigQueryException { + return getDataset(DatasetId.of(datasetId), options); + } + + @Override + public DatasetInfo getDataset(final DatasetId datasetId, DatasetOption... options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + Dataset answer = runWithRetries(new Callable() { + @Override + public Dataset call() { + return bigQueryRpc.getDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : DatasetInfo.fromPb(answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listDatasets(DatasetListOption... 
options) throws BigQueryException { + return listDatasets(options(), optionMap(options)); + } + + private static Page listDatasets(final BigQueryOptions serviceOptions, + final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listDatasets(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + return new PageImpl<>(new DatasetPageFetcher(serviceOptions, cursor, optionsMap), cursor, + Iterables.transform(result.y(), DatasetInfo.FROM_PB_FUNCTION)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException { + return delete(DatasetId.of(datasetId), options); + } + + @Override + public boolean delete(final DatasetId datasetId, DatasetDeleteOption... options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.deleteDataset(datasetId.dataset(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public boolean delete(String datasetId, String tableId) throws BigQueryException { + return delete(TableId.of(datasetId, tableId)); + } + + @Override + public boolean delete(final TableId tableId) throws BigQueryException { + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.deleteTable(tableId.dataset(), tableId.table()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public DatasetInfo update(DatasetInfo 
dataset, DatasetOption... options) + throws BigQueryException { + final Dataset datasetPb = setProjectId(dataset).toPb(); + final Map optionsMap = optionMap(options); + try { + return DatasetInfo.fromPb(runWithRetries(new Callable() { + @Override + public Dataset call() { + return bigQueryRpc.patch(datasetPb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public T update(T table, TableOption... options) + throws BigQueryException { + final Table tablePb = setProjectId(table).toPb(); + final Map optionsMap = optionMap(options); + try { + return BaseTableInfo.fromPb(runWithRetries(new Callable
() { + @Override + public Table call() { + return bigQueryRpc.patch(tablePb, optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER)); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public T getTable(final String datasetId, final String tableId, + TableOption... options) throws BigQueryException { + return getTable(TableId.of(datasetId, tableId), options); + } + + @Override + public T getTable(final TableId tableId, TableOption... options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + Table answer = runWithRetries(new Callable
() { + @Override + public Table call() { + return bigQueryRpc.getTable(tableId.dataset(), tableId.table(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : BaseTableInfo.fromPb(answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listTables(String datasetId, TableListOption... options) + throws BigQueryException { + return listTables(datasetId, options(), optionMap(options)); + } + + @Override + public Page listTables(DatasetId datasetId, TableListOption... options) + throws BigQueryException { + return listTables(datasetId.dataset(), options(), optionMap(options)); + } + + private static Page listTables(final String datasetId, final BigQueryOptions + serviceOptions, final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listTables(datasetId, optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable tables = Iterables.transform(result.y(), + BaseTableInfo.FROM_PB_FUNCTION); + return new PageImpl<>(new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), + cursor, tables); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException { + final TableId tableId = request.table(); + final TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest(); + requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues()); + requestPb.setSkipInvalidRows(request.skipInvalidRows()); + List rowsPb = Lists.transform(request.rows(), new Function() { + @Override + public Rows apply(RowToInsert rowToInsert) { + return new Rows().setInsertId(rowToInsert.id()).setJson(rowToInsert.content()); + } + }); + 
requestPb.setRows(rowsPb); + return InsertAllResponse.fromPb( + bigQueryRpc.insertAll(tableId.dataset(), tableId.table(), requestPb)); + } + + @Override + public Page> listTableData(String datasetId, String tableId, + TableDataListOption... options) throws BigQueryException { + return listTableData(TableId.of(datasetId, tableId), options(), optionMap(options)); + } + + @Override + public Page> listTableData(TableId tableId, TableDataListOption... options) + throws BigQueryException { + return listTableData(tableId, options(), optionMap(options)); + } + + private static Page> listTableData(final TableId tableId, + final BigQueryOptions serviceOptions, final Map optionsMap) { + try { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc() + .listTableData(tableId.dataset(), tableId.table(), optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + return new PageImpl<>(new TableDataPageFetcher(tableId, serviceOptions, cursor, optionsMap), + cursor, transformTableData(result.y())); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + private static List> transformTableData(Iterable tableDataPb) { + return ImmutableList.copyOf( + Iterables.transform(tableDataPb != null ? tableDataPb : ImmutableList.of(), + new Function>() { + @Override + public List apply(TableRow rowPb) { + return Lists.transform(rowPb.getF(), FieldValue.FROM_PB_FUNCTION); + } + })); + } + + @Override + public T getJob(String jobId, JobOption... options) throws BigQueryException { + return getJob(JobId.of(jobId), options); + } + + @Override + public T getJob(final JobId jobId, JobOption... 
options) + throws BigQueryException { + final Map optionsMap = optionMap(options); + try { + Job answer = runWithRetries(new Callable() { + @Override + public Job call() { + return bigQueryRpc.getJob(jobId.job(), optionsMap); + } + }, options().retryParams(), EXCEPTION_HANDLER); + return answer == null ? null : JobInfo.fromPb(answer); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public Page listJobs(JobListOption... options) throws BigQueryException { + return listJobs(options(), optionMap(options)); + } + + private static Page listJobs(final BigQueryOptions serviceOptions, + final Map optionsMap) { + BigQueryRpc.Tuple> result = + runWithRetries(new Callable>>() { + @Override + public BigQueryRpc.Tuple> call() { + return serviceOptions.rpc().listJobs(optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + String cursor = result.x(); + Iterable jobs = Iterables.transform(result.y(), JobInfo.FROM_PB_FUNCTION); + return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + } + + @Override + public boolean cancel(String jobId) throws BigQueryException { + return cancel(JobId.of(jobId)); + } + + @Override + public boolean cancel(final JobId jobId) throws BigQueryException { + try { + return runWithRetries(new Callable() { + @Override + public Boolean call() { + return bigQueryRpc.cancel(jobId.job()); + } + }, options().retryParams(), EXCEPTION_HANDLER); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public QueryResponse query(final QueryRequest request) throws BigQueryException { + try { + com.google.api.services.bigquery.model.QueryResponse results = + runWithRetries(new Callable() { + @Override + public com.google.api.services.bigquery.model.QueryResponse call() { + return bigQueryRpc.query(setProjectId(request).toPb()); + } + }, options().retryParams(), 
EXCEPTION_HANDLER); + QueryResponse.Builder builder = QueryResponse.builder(); + JobId completeJobId = JobId.fromPb(results.getJobReference()); + builder.jobId(completeJobId); + builder.jobComplete(results.getJobComplete()); + List rowsPb = results.getRows(); + if (results.getJobComplete()) { + builder.jobComplete(true); + QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, + results.getPageToken(), options(), ImmutableMap.of()); + resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); + resultBuilder.cacheHit(results.getCacheHit()); + if (results.getSchema() != null) { + resultBuilder.schema(Schema.fromPb(results.getSchema())); + } + if (results.getTotalRows() != null) { + resultBuilder.totalRows(results.getTotalRows().longValue()); + } + builder.result(resultBuilder.build()); + } + if (results.getErrors() != null) { + builder.executionErrors( + Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION)); + } + return builder.build(); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + @Override + public QueryResponse getQueryResults(JobId job, QueryResultsOption... 
options) + throws BigQueryException { + Map optionsMap = optionMap(options); + return getQueryResults(job, options(), optionsMap); + } + + private static QueryResponse getQueryResults(final JobId jobId, + final BigQueryOptions serviceOptions, final Map optionsMap) { + try { + GetQueryResultsResponse results = + runWithRetries(new Callable() { + @Override + public GetQueryResultsResponse call() { + return serviceOptions.rpc().getQueryResults(jobId.job(), optionsMap); + } + }, serviceOptions.retryParams(), EXCEPTION_HANDLER); + QueryResponse.Builder builder = QueryResponse.builder(); + JobId completeJobId = JobId.fromPb(results.getJobReference()); + builder.jobId(completeJobId); + builder.etag(results.getEtag()); + builder.jobComplete(results.getJobComplete()); + List rowsPb = results.getRows(); + if (results.getJobComplete()) { + QueryResult.Builder resultBuilder = transformQueryResults(completeJobId, rowsPb, + results.getPageToken(), serviceOptions, ImmutableMap.of()); + resultBuilder.totalBytesProcessed(results.getTotalBytesProcessed()); + resultBuilder.cacheHit(results.getCacheHit()); + if (results.getSchema() != null) { + resultBuilder.schema(Schema.fromPb(results.getSchema())); + } + if (results.getTotalRows() != null) { + resultBuilder.totalRows(results.getTotalRows().longValue()); + } + builder.result(resultBuilder.build()); + } + if (results.getErrors() != null) { + builder.executionErrors( + Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION)); + } + return builder.build(); + } catch (RetryHelper.RetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } + + private static QueryResult.Builder transformQueryResults(JobId jobId, List rowsPb, + String cursor, BigQueryOptions serviceOptions, Map optionsMap) { + QueryResultsPageFetcherImpl nextPageFetcher = + new QueryResultsPageFetcherImpl(jobId, serviceOptions, cursor, optionsMap); + return QueryResult.builder() + .pageFetcher(nextPageFetcher) + .cursor(cursor) + 
.results(transformTableData(rowsPb)); + } + + private Map optionMap(Option... options) { + Map optionMap = Maps.newEnumMap(BigQueryRpc.Option.class); + for (Option option : options) { + Object prev = optionMap.put(option.rpcOption(), option.value()); + checkArgument(prev == null, "Duplicate option %s", option); + } + return optionMap; + } + + private DatasetInfo setProjectId(DatasetInfo dataset) { + DatasetInfo.Builder datasetBuilder = dataset.toBuilder(); + datasetBuilder.datasetId(setProjectId(dataset.datasetId())); + if (dataset.acl() != null) { + List acls = Lists.newArrayListWithCapacity(dataset.acl().size()); + for (Acl acl : dataset.acl()) { + if (acl.entity().type() == Acl.Entity.Type.VIEW) { + Dataset.Access accessPb = acl.toPb(); + TableReference viewReferencePb = accessPb.getView(); + if (viewReferencePb.getProjectId() == null) { + viewReferencePb.setProjectId(options().projectId()); + } + acls.add(new Acl(new Acl.View(TableId.fromPb(viewReferencePb)))); + } else { + acls.add(acl); + } + } + datasetBuilder.acl(acls); + } + return datasetBuilder.build(); + } + + private DatasetId setProjectId(DatasetId dataset) { + return dataset.project() != null ? dataset + : DatasetId.of(options().projectId(), dataset.dataset()); + } + + private BaseTableInfo setProjectId(BaseTableInfo table) { + return table.toBuilder().tableId(setProjectId(table.tableId())).build(); + } + + private TableId setProjectId(TableId table) { + return table.project() != null ? 
table + : TableId.of(options().projectId(), table.dataset(), table.table()); + } + + private JobInfo setProjectId(JobInfo job) { + if (job instanceof CopyJobInfo) { + CopyJobInfo copyJob = (CopyJobInfo) job; + CopyJobInfo.Builder copyBuilder = copyJob.toBuilder(); + copyBuilder.destinationTable(setProjectId(copyJob.destinationTable())); + copyBuilder.sourceTables( + Lists.transform(copyJob.sourceTables(), new Function() { + @Override + public TableId apply(TableId tableId) { + return setProjectId(tableId); + } + })); + return copyBuilder.build(); + } + if (job instanceof QueryJobInfo) { + QueryJobInfo queryJob = (QueryJobInfo) job; + QueryJobInfo.Builder queryBuilder = queryJob.toBuilder(); + if (queryJob.destinationTable() != null) { + queryBuilder.destinationTable(setProjectId(queryJob.destinationTable())); + } + if (queryJob.defaultDataset() != null) { + queryBuilder.defaultDataset(setProjectId(queryJob.defaultDataset())); + } + return queryBuilder.build(); + } + if (job instanceof ExtractJobInfo) { + ExtractJobInfo extractJob = (ExtractJobInfo) job; + ExtractJobInfo.Builder extractBuilder = extractJob.toBuilder(); + extractBuilder.sourceTable(setProjectId(extractJob.sourceTable())); + return extractBuilder.build(); + } + if (job instanceof LoadJobInfo) { + LoadJobInfo loadJob = (LoadJobInfo) job; + LoadJobInfo.Builder loadBuilder = loadJob.toBuilder(); + loadBuilder.destinationTable(setProjectId(loadJob.destinationTable())); + return loadBuilder.build(); + } + return job; + } + + private QueryRequest setProjectId(QueryRequest request) { + QueryRequest.Builder builder = request.toBuilder(); + if (request.defaultDataset() != null) { + builder.defaultDataset(setProjectId(request.defaultDataset())); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java new file mode 100644 index 
000000000000..71d43cfbe565 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryOptions.java @@ -0,0 +1,114 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.collect.ImmutableSet; +import com.google.gcloud.ServiceOptions; +import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.spi.BigQueryRpcFactory; +import com.google.gcloud.spi.DefaultBigQueryRpc; + +import java.util.Set; + +public class BigQueryOptions extends ServiceOptions { + + private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery"; + private static final Set SCOPES = ImmutableSet.of(BIGQUERY_SCOPE); + private static final long serialVersionUID = -215981591481708043L; + + public static class DefaultBigqueryFactory implements BigQueryFactory { + + private static final BigQueryFactory INSTANCE = new DefaultBigqueryFactory(); + + @Override + public BigQuery create(BigQueryOptions options) { + return new BigQueryImpl(options); + } + } + + public static class DefaultBigQueryRpcFactory implements BigQueryRpcFactory { + + private static final BigQueryRpcFactory INSTANCE = new DefaultBigQueryRpcFactory(); + + @Override + public BigQueryRpc create(BigQueryOptions options) { + return new DefaultBigQueryRpc(options); + } + } + + public static class Builder extends + ServiceOptions.Builder { + + private 
Builder() { + } + + private Builder(BigQueryOptions options) { + super(options); + } + + @Override + public BigQueryOptions build() { + return new BigQueryOptions(this); + } + } + + private BigQueryOptions(Builder builder) { + super(BigQueryFactory.class, BigQueryRpcFactory.class, builder); + } + + @Override + protected BigQueryFactory defaultServiceFactory() { + return DefaultBigqueryFactory.INSTANCE; + } + + @Override + protected BigQueryRpcFactory defaultRpcFactory() { + return DefaultBigQueryRpcFactory.INSTANCE; + } + + @Override + protected Set scopes() { + return SCOPES; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public int hashCode() { + return baseHashCode(); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof BigQueryOptions)) { + return false; + } + BigQueryOptions other = (BigQueryOptions) obj; + return baseEquals(other); + } + + public static BigQueryOptions defaultInstance() { + return builder().build(); + } + + public static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java new file mode 100644 index 000000000000..a3247b78d5b8 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CopyJobInfo.java @@ -0,0 +1,258 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.JobConfigurationTableCopy; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Copy Job. A Copy Job copies an existing table to another new or existing table. + */ +public class CopyJobInfo extends JobInfo { + + private static final long serialVersionUID = 7830335512951916299L; + + private final List sourceTables; + private final TableId destinationTable; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + + public static final class Builder extends JobInfo.Builder { + + private List sourceTables; + private TableId destinationTable; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + + private Builder() {} + + private Builder(CopyJobInfo jobInfo) { + super(jobInfo); + this.sourceTables = jobInfo.sourceTables; + this.destinationTable = jobInfo.destinationTable; + this.createDisposition = jobInfo.createDisposition; + this.writeDisposition = jobInfo.writeDisposition; + } + + private Builder(Job jobPb) { + super(jobPb); + JobConfigurationTableCopy copyConfigurationPb = jobPb.getConfiguration().getCopy(); + this.destinationTable = TableId.fromPb(copyConfigurationPb.getDestinationTable()); + if (copyConfigurationPb.getSourceTables() != null) { + this.sourceTables = + Lists.transform(copyConfigurationPb.getSourceTables(), TableId.FROM_PB_FUNCTION); + } else { + this.sourceTables = 
ImmutableList.of(TableId.fromPb(copyConfigurationPb.getSourceTable())); + } + if (copyConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + CreateDisposition.valueOf(copyConfigurationPb.getCreateDisposition()); + } + if (copyConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = WriteDisposition.valueOf(copyConfigurationPb.getWriteDisposition()); + } + } + + /** + * Sets the source tables to copy. + */ + public Builder sourceTables(List sourceTables) { + this.sourceTables = sourceTables != null ? ImmutableList.copyOf(sourceTables) : null; + return self(); + } + + /** + * Sets the destination table of the copy job. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return self(); + } + + /** + * Sets whether the job is allowed to create new tables. + * + * @see + * Jobs: Link Configuration + */ + public Builder createDisposition(CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return self(); + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Jobs: Link Configuration + */ + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return self(); + } + + @Override + public CopyJobInfo build() { + return new CopyJobInfo(this); + } + } + + private CopyJobInfo(Builder builder) { + super(builder); + this.sourceTables = checkNotNull(builder.sourceTables); + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + } + + /** + * Returns the source tables to copy. + */ + public List sourceTables() { + return sourceTables; + } + + /** + * Returns the destination table to load the data into. 
+ */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Jobs: Copy Configuration + */ + public CreateDisposition createDisposition() { + return this.createDisposition; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Jobs: Copy Configuration + */ + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceTables", sourceTables) + .add("destinationTable", destinationTable) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof CopyJobInfo && Objects.equals(toPb(), ((CopyJobInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), sourceTables, destinationTable, createDisposition, + writeDisposition); + } + + @Override + Job toPb() { + JobConfigurationTableCopy copyConfigurationPb = new JobConfigurationTableCopy(); + copyConfigurationPb.setDestinationTable(destinationTable.toPb()); + if (sourceTables.size() == 1) { + copyConfigurationPb.setSourceTable(sourceTables.get(0).toPb()); + } else { + copyConfigurationPb.setSourceTables(Lists.transform(sourceTables, TableId.TO_PB_FUNCTION)); + } + if (createDisposition != null) { + copyConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + copyConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + return super.toPb().setConfiguration(new JobConfiguration().setCopy(copyConfigurationPb)); + } + + /** + * Creates a builder for a BigQuery Copy Job given destination and source table. 
+ */ + public static Builder builder(TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, ImmutableList.of(checkNotNull(sourceTable))); + } + + /** + * Creates a builder for a BigQuery Copy Job given destination and source tables. + */ + public static Builder builder(TableId destinationTable, List sourceTables) { + return new Builder().destinationTable(destinationTable).sourceTables(sourceTables); + } + + /** + * Returns a BigQuery Copy Job for the given destination and source table. Job's id is chosen by + * the service. + */ + public static CopyJobInfo of(TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, sourceTable).build(); + } + + /** + * Returns a BigQuery Copy Job for the given destination and source tables. Job's id is chosen by + * the service. + */ + public static CopyJobInfo of(TableId destinationTable, List sourceTables) { + return builder(destinationTable, sourceTables).build(); + } + + /** + * Returns a BigQuery Copy Job for the given destination and source table. Job's id is set to the + * provided value. + */ + public static CopyJobInfo of(JobId jobId, TableId destinationTable, TableId sourceTable) { + return builder(destinationTable, sourceTable).jobId(jobId).build(); + } + + /** + * Returns a BigQuery Copy Job for the given destination and source tables. Job's id is set to the + * provided value. 
+ */ + public static CopyJobInfo of(JobId jobId, TableId destinationTable, List sourceTables) { + return builder(destinationTable, sourceTables).jobId(jobId).build(); + } + + @SuppressWarnings("unchecked") + static CopyJobInfo fromPb(Job jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java new file mode 100644 index 000000000000..274ef5678a8a --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/CsvOptions.java @@ -0,0 +1,271 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; + +import java.nio.charset.Charset; +import java.util.Objects; + +/** + * Google BigQuery options for CSV format. This class wraps some properties of CSV files used by + * BigQuery to parse external data. 
+ */ +public class CsvOptions extends FormatOptions { + + private static final long serialVersionUID = 2193570529308612708L; + + private final Boolean allowJaggedRows; + private final Boolean allowQuotedNewLines; + private final String encoding; + private final String fieldDelimiter; + private final String quote; + private final Integer skipLeadingRows; + + public static final class Builder { + + private Boolean allowJaggedRows; + private Boolean allowQuotedNewLines; + private String encoding; + private String fieldDelimiter; + private String quote; + private Integer skipLeadingRows; + + private Builder() {} + + /** + * Set whether BigQuery should accept rows that are missing trailing optional columns. If + * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false}, + * records with missing trailing columns are treated as bad records, and if there are too many + * bad records, an invalid error is returned in the job result. By default, rows with missing + * trailing columns are considered bad records. + */ + public Builder allowJaggedRows(Boolean allowJaggedRows) { + this.allowJaggedRows = allowJaggedRows; + return this; + } + + /** + * Sets whether BigQuery should allow quoted data sections that contain newline characters in a + * CSV file. By default quoted newline are not allowed. + */ + public Builder allowQuotedNewLines(Boolean allowQuotedNewLines) { + this.allowQuotedNewLines = allowQuotedNewLines; + return this; + } + + /** + * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The + * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote(String)} and {@link #fieldDelimiter(String)}. + */ + public Builder encoding(String encoding) { + this.encoding = encoding; + return this; + } + + /** + * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The + * default value is UTF-8. 
BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote(String)} and {@link #fieldDelimiter(String)}. + */ + public Builder encoding(Charset encoding) { + this.encoding = encoding.name(); + return this; + } + + /** + * Sets the separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 + * encoding, and then uses the first byte of the encoded string to split the data in its raw, + * binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator. + * The default value is a comma (','). + */ + public Builder fieldDelimiter(String fieldDelimiter) { + this.fieldDelimiter = fieldDelimiter; + return this; + } + + /** + * Sets the value that is used to quote data sections in a CSV file. BigQuery converts the + * string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split + * the data in its raw, binary state. The default value is a double-quote ('"'). If your data + * does not contain quoted sections, set the property value to an empty string. If your data + * contains quoted newline characters, you must also set {@link #allowQuotedNewLines(Boolean)} + * property to {@code true}. + */ + public Builder quote(String quote) { + this.quote = quote; + return this; + } + + /** + * Sets the number of rows at the top of a CSV file that BigQuery will skip when reading the + * data. The default value is 0. This property is useful if you have header rows in the file + * that should be skipped. + */ + public Builder skipLeadingRows(Integer skipLeadingRows) { + this.skipLeadingRows = skipLeadingRows; + return this; + } + + /** + * Creates a {@code CsvOptions} object. 
+ */ + public CsvOptions build() { + return new CsvOptions(this); + } + } + + private CsvOptions(Builder builder) { + super(FormatOptions.CSV); + this.allowJaggedRows = builder.allowJaggedRows; + this.allowQuotedNewLines = builder.allowQuotedNewLines; + this.encoding = builder.encoding; + this.fieldDelimiter = builder.fieldDelimiter; + this.quote = builder.quote; + this.skipLeadingRows = builder.skipLeadingRows; + } + + /** + * Returns whether BigQuery should accept rows that are missing trailing optional columns. If + * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false}, + * records with missing trailing columns are treated as bad records, and if the number of bad + * records exceeds {@link ExternalDataConfiguration#maxBadRecords()}, an invalid error is returned + * in the job result. + */ + public Boolean allowJaggedRows() { + return allowJaggedRows; + } + + /** + * Returns whether BigQuery should allow quoted data sections that contain newline characters in a + * CSV file. + */ + public Boolean allowQuotedNewLines() { + return allowQuotedNewLines; + } + + /** + * Returns the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. If + * not set, UTF-8 is used. BigQuery decodes the data after the raw, binary data has been split + * using the values set in {@link #quote()} and {@link #fieldDelimiter()}. + */ + public String encoding() { + return encoding; + } + + /** + * Returns the separator for fields in a CSV file. + */ + public String fieldDelimiter() { + return fieldDelimiter; + } + + /** + * Returns the value that is used to quote data sections in a CSV file. + */ + public String quote() { + return quote; + } + + /** + * Returns the number of rows at the top of a CSV file that BigQuery will skip when reading the + * data. + */ + public Integer skipLeadingRows() { + return skipLeadingRows; + } + + /** + * Returns a builder for the {@code CsvOptions} object. 
+ */ + public Builder toBuilder() { + return new Builder() + .allowJaggedRows(allowJaggedRows) + .allowQuotedNewLines(allowQuotedNewLines) + .encoding(encoding) + .fieldDelimiter(fieldDelimiter) + .quote(quote) + .skipLeadingRows(skipLeadingRows); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("type", type()) + .add("allowJaggedRows", allowJaggedRows) + .add("allowQuotedNewLines", allowQuotedNewLines) + .add("encoding", encoding) + .add("fieldDelimiter", fieldDelimiter) + .add("quote", quote) + .add("skipLeadingRows", skipLeadingRows) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(type(), allowJaggedRows, allowQuotedNewLines, encoding, fieldDelimiter, + quote, skipLeadingRows); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof CsvOptions && Objects.equals(toPb(), ((CsvOptions) obj).toPb()); + } + + com.google.api.services.bigquery.model.CsvOptions toPb() { + com.google.api.services.bigquery.model.CsvOptions csvOptions = + new com.google.api.services.bigquery.model.CsvOptions(); + csvOptions.setAllowJaggedRows(allowJaggedRows); + csvOptions.setAllowQuotedNewlines(allowQuotedNewLines); + csvOptions.setEncoding(encoding); + csvOptions.setFieldDelimiter(fieldDelimiter); + csvOptions.setQuote(quote); + csvOptions.setSkipLeadingRows(skipLeadingRows); + return csvOptions; + } + + /** + * Returns a builder for a CsvOptions object. 
+ */ + public static Builder builder() { + return new Builder(); + } + + static CsvOptions fromPb(com.google.api.services.bigquery.model.CsvOptions csvOptions) { + Builder builder = builder(); + if (csvOptions.getAllowJaggedRows() != null) { + builder.allowJaggedRows(csvOptions.getAllowJaggedRows()); + } + if (csvOptions.getAllowQuotedNewlines() != null) { + builder.allowQuotedNewLines(csvOptions.getAllowQuotedNewlines()); + } + if (csvOptions.getEncoding() != null) { + builder.encoding(csvOptions.getEncoding()); + } + if (csvOptions.getFieldDelimiter() != null) { + builder.fieldDelimiter(csvOptions.getFieldDelimiter()); + } + if (csvOptions.getQuote() != null) { + builder.quote(csvOptions.getQuote()); + } + if (csvOptions.getSkipLeadingRows() != null) { + builder.skipLeadingRows(csvOptions.getSkipLeadingRows()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java new file mode 100644 index 000000000000..942322ea51d3 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetId.java @@ -0,0 +1,93 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.DatasetReference; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Dataset identity. + */ +public class DatasetId implements Serializable { + + private static final long serialVersionUID = -6186254820908152300L; + + private final String project; + private final String dataset; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns dataset's user-defined id. + */ + public String dataset() { + return dataset; + } + + private DatasetId(String project, String dataset) { + this.project = project; + this.dataset = dataset; + } + + /** + * Creates a dataset identity given project's and dataset's user-defined ids. + */ + public static DatasetId of(String project, String dataset) { + return new DatasetId(checkNotNull(project), checkNotNull(dataset)); + } + + /** + * Creates a dataset identity given only its user-defined id. 
+ */ + public static DatasetId of(String dataset) { + return new DatasetId(null, checkNotNull(dataset)); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof DatasetId && Objects.equals(toPb(), ((DatasetId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(project, dataset); + } + + @Override + public String toString() { + return toPb().toString(); + } + + DatasetReference toPb() { + return new DatasetReference().setProjectId(project).setDatasetId(dataset); + } + + static DatasetId fromPb(DatasetReference datasetRef) { + return new DatasetId( + datasetRef.getProjectId(), + datasetRef.getDatasetId()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java new file mode 100644 index 000000000000..95897ba3a801 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/DatasetInfo.java @@ -0,0 +1,421 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.Dataset; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Dataset information. A dataset is a grouping mechanism that holds zero or more + * tables. Datasets are the lowest level unit of access control; you cannot control access at the + * table level. + * + * @see + * Managing Jobs, Datasets, and Projects + */ +public final class DatasetInfo implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public DatasetInfo apply(Dataset pb) { + return DatasetInfo.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public Dataset apply(DatasetInfo datasetInfo) { + return datasetInfo.toPb(); + } + }; + + private static final long serialVersionUID = -6615133444520365839L; + + private final DatasetId datasetId; + private final List acl; + private final Long creationTime; + private final Long defaultTableLifetime; + private final String description; + private final String etag; + private final String friendlyName; + private final String id; + private final Long lastModified; + private final String location; + private final String selfLink; + + public static final class Builder { + + private DatasetId datasetId; + private List acl; + private Long creationTime; + private Long defaultTableLifetime; + private String description; + private String etag; + private String friendlyName; + private String id; + private Long lastModified; + private String location; + private String selfLink; + + private 
Builder() {} + + private Builder(DatasetInfo datasetInfo) { + this.datasetId = datasetInfo.datasetId; + this.acl = datasetInfo.acl; + this.creationTime = datasetInfo.creationTime; + this.defaultTableLifetime = datasetInfo.defaultTableLifetime; + this.description = datasetInfo.description; + this.etag = datasetInfo.etag; + this.friendlyName = datasetInfo.friendlyName; + this.id = datasetInfo.id; + this.lastModified = datasetInfo.lastModified; + this.location = datasetInfo.location; + this.selfLink = datasetInfo.selfLink; + } + + /** + * Sets the dataset identity. + */ + public Builder datasetId(DatasetId datasetId) { + this.datasetId = checkNotNull(datasetId); + return this; + } + + /** + * Sets the dataset's access control configuration. + * + * @see Access Control + */ + public Builder acl(List acl) { + this.acl = acl != null ? ImmutableList.copyOf(acl) : null; + return this; + } + + Builder creationTime(Long creationTime) { + this.creationTime = creationTime; + return this; + } + + /** + * Sets the default lifetime of all tables in the dataset, in milliseconds. The minimum value is + * 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the + * dataset will have an expirationTime property set to the creation time plus the value in this + * property, and changing the value will only affect new tables, not existing ones. When the + * expirationTime for a given table is reached, that table will be deleted automatically. If a + * table's expirationTime is modified or removed before the table expires, or if you provide an + * explicit expirationTime when creating a table, that value takes precedence over the default + * expiration time indicated by this property. This property is experimental and might be + * subject to change or removed. 
+ */ + public Builder defaultTableLifetime(Long defaultTableLifetime) { + this.defaultTableLifetime = + firstNonNull(defaultTableLifetime, Data.nullOf(Long.class)); + return this; + } + + /** + * Sets a user-friendly description for the dataset. + */ + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + /** + * Sets a user-friendly name for the dataset. + */ + public Builder friendlyName(String friendlyName) { + this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class)); + return this; + } + + Builder id(String id) { + this.id = id; + return this; + } + + Builder lastModified(Long lastModified) { + this.lastModified = lastModified; + return this; + } + + /** + * Sets the geographic location where the dataset should reside. This property is experimental + * and might be subject to change or removed. + * + * @see Dataset + * Location + */ + public Builder location(String location) { + this.location = firstNonNull(location, Data.nullOf(String.class)); + return this; + } + + Builder selfLink(String selfLink) { + this.selfLink = selfLink; + return this; + } + + /** + * Creates a {@code DatasetInfo} object. + */ + public DatasetInfo build() { + return new DatasetInfo(this); + } + } + + private DatasetInfo(Builder builder) { + datasetId = checkNotNull(builder.datasetId); + acl = builder.acl; + creationTime = builder.creationTime; + defaultTableLifetime = builder.defaultTableLifetime; + description = builder.description; + etag = builder.etag; + friendlyName = builder.friendlyName; + id = builder.id; + lastModified = builder.lastModified; + location = builder.location; + selfLink = builder.selfLink; + } + + /** + * Returns the dataset identity. + */ + public DatasetId datasetId() { + return datasetId; + } + + /** + * Returns the dataset's access control configuration. 
+ * + * @see Access Control + */ + public List acl() { + return acl; + } + + /** + * Returns the time when this dataset was created, in milliseconds since the epoch. + */ + public Long creationTime() { + return creationTime; + } + + /** + * Returns the default lifetime of all tables in the dataset, in milliseconds. Once this property + * is set, all newly-created tables in the dataset will have an expirationTime property set to the + * creation time plus the value in this property, and changing the value will only affect new + * tables, not existing ones. When the expirationTime for a given table is reached, that table + * will be deleted automatically. If a table's expirationTime is modified or removed before the + * table expires, or if you provide an explicit expirationTime when creating a table, that value + * takes precedence over the default expiration time indicated by this property. + */ + public Long defaultTableLifetime() { + return defaultTableLifetime; + } + + /** + * Returns a user-friendly description for the dataset. + */ + public String description() { + return description; + } + + /** + * Returns the hash of the dataset resource. + */ + public String etag() { + return etag; + } + + /** + * Returns a user-friendly name for the dataset. + */ + public String friendlyName() { + return friendlyName; + } + + /** + * Returns an opaque id for the dataset. + */ + public String id() { + return id; + } + + /** + * Returns the time when this dataset or any of its tables was last modified, in milliseconds + * since the epoch. + */ + public Long lastModified() { + return lastModified; + } + + /** + * Returns the geographic location where the dataset should reside. + * + * @see + * Dataset Location + */ + public String location() { + return location; + } + + /** + * Returns an URL that can be used to access the resource again. The returned URL can be used for + * get or update requests. 
+ */ + public String selfLink() { + return selfLink; + } + + /** + * Returns a builder for the {@code DatasetInfo} object. + */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("datasetId", datasetId) + .add("creationTime", creationTime) + .add("defaultTableLifetime", defaultTableLifetime) + .add("description", description) + .add("etag", etag) + .add("friendlyName", friendlyName) + .add("id", id) + .add("lastModified", lastModified) + .add("location", location) + .add("selfLink", selfLink) + .add("acl", acl) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(datasetId); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof DatasetInfo && Objects.equals(toPb(), ((DatasetInfo) obj).toPb()); + } + + Dataset toPb() { + Dataset datasetPb = new Dataset(); + datasetPb.setDatasetReference(datasetId.toPb()); + datasetPb.setCreationTime(creationTime); + datasetPb.setDefaultTableExpirationMs(defaultTableLifetime); + datasetPb.setDescription(description); + datasetPb.setEtag(etag); + datasetPb.setFriendlyName(friendlyName); + datasetPb.setId(id); + datasetPb.setLastModifiedTime(lastModified); + datasetPb.setLocation(location); + datasetPb.setSelfLink(selfLink); + if (acl != null) { + datasetPb.setAccess(Lists.transform(acl, new Function() { + @Override + public Dataset.Access apply(Acl acl) { + return acl.toPb(); + } + })); + } + return datasetPb; + } + + /** + * Returns a builder for the DatasetInfo object given it's user-defined id. + */ + public static Builder builder(String datasetId) { + return new Builder().datasetId(DatasetId.of(datasetId)); + } + + /** + * Returns a builder for the DatasetInfo object given it's project and user-defined id. 
+ */ + public static Builder builder(String projectId, String datasetId) { + return new Builder().datasetId(DatasetId.of(projectId, datasetId)); + } + + /** + * Returns a builder for the DatasetInfo object given it's identity. + */ + public static Builder builder(DatasetId datasetId) { + return new Builder().datasetId(datasetId); + } + + static DatasetInfo fromPb(Dataset datasetPb) { + Builder builder = builder(datasetPb.getDatasetReference().getProjectId(), + datasetPb.getDatasetReference().getDatasetId()); + if (datasetPb.getAccess() != null) { + builder.acl(Lists.transform(datasetPb.getAccess(), + new Function() { + @Override + public Acl apply(Dataset.Access accessPb) { + return Acl.fromPb(accessPb); + } + })); + } + if (datasetPb.getCreationTime() != null) { + builder.creationTime(datasetPb.getCreationTime()); + } + if (datasetPb.getDefaultTableExpirationMs() != null) { + builder.defaultTableLifetime(datasetPb.getDefaultTableExpirationMs()); + } + if (datasetPb.getDescription() != null) { + builder.description(datasetPb.getDescription()); + } + if (datasetPb.getEtag() != null) { + builder.etag(datasetPb.getEtag()); + } + if (datasetPb.getFriendlyName() != null) { + builder.friendlyName(datasetPb.getFriendlyName()); + } + if (datasetPb.getId() != null) { + builder.id(datasetPb.getId()); + } + if (datasetPb.getLastModifiedTime() != null) { + builder.lastModified(datasetPb.getLastModifiedTime()); + } + if (datasetPb.getLocation() != null) { + builder.location(datasetPb.getLocation()); + } + if (datasetPb.getSelfLink() != null) { + builder.selfLink(datasetPb.getSelfLink()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalDataConfiguration.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalDataConfiguration.java new file mode 100644 index 000000000000..4344aeba186b --- /dev/null +++ 
b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalDataConfiguration.java @@ -0,0 +1,397 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery configuration for tables backed by external data. Objects of this class describe + * the data format, location, and other properties of a table stored outside of BigQuery. + * By defining these properties, the data source can then be queried as if it were a standard + * BigQuery table. Support for external tables is experimental and might be subject to changes or + * removed. 
+ * + * @see Federated Data Sources + * + */ +public class ExternalDataConfiguration implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public ExternalDataConfiguration apply( + com.google.api.services.bigquery.model.ExternalDataConfiguration configurationPb) { + return ExternalDataConfiguration.fromPb(configurationPb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public com.google.api.services.bigquery.model.ExternalDataConfiguration apply( + ExternalDataConfiguration configuration) { + return configuration.toPb(); + } + }; + + private static final long serialVersionUID = -8004288831035566549L; + + private final List sourceUris; + private final Schema schema; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Boolean ignoreUnknownValues; + private final String compression; + + public static final class Builder { + + private List sourceUris; + private Schema schema; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Boolean ignoreUnknownValues; + private String compression; + + private Builder() {} + + /** + * Sets the fully-qualified URIs that point to your data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character that must come after the + * bucket's name. Size limits related to load jobs apply to external data sources, plus an + * additional limit of 10 GB maximum size across all URIs. + * + * @see Quota + */ + public Builder sourceUris(List sourceUris) { + this.sourceUris = ImmutableList.copyOf(checkNotNull(sourceUris)); + return this; + } + + /** + * Sets the schema for the external data. + */ + public Builder schema(Schema schema) { + this.schema = checkNotNull(schema); + return this; + } + + /** + * Sets the source format, and possibly some parsing options, of the external data. 
Supported + * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. + * + * + * Source Format + */ + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = checkNotNull(formatOptions); + return this; + } + + /** + * Sets the maximum number of bad records that BigQuery can ignore when reading data. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * The default value is 0, which requires that all records are valid. + */ + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + /** + * Sets whether BigQuery should allow extra values that are not represented in the table schema. + * If true, the extra values are ignored. If false, records with extra columns are treated as + * bad records, and if there are too many bad records, an invalid error is returned in the job + * result. The default value is false. The value set with {@link #formatOptions(FormatOptions)} + * property determines what BigQuery treats as an extra value. + * + * @see + * Ignore Unknown Values + */ + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * Sets compression type of the data source. By default no compression is assumed. + * + * @see + * Compression + */ + public Builder compression(String compression) { + this.compression = compression; + return this; + } + + /** + * Creates an {@code ExternalDataConfiguration} object. 
+ */ + public ExternalDataConfiguration build() { + return new ExternalDataConfiguration(this); + } + } + + ExternalDataConfiguration(Builder builder) { + this.compression = builder.compression; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.formatOptions = builder.formatOptions; + this.sourceUris = builder.sourceUris; + } + + /** + * Returns the compression type of the data source. + * + * @see + * Compression + */ + public String compression() { + return compression; + } + + /** + * Returns whether BigQuery should allow extra values that are not represented in the table + * schema. If true, the extra values are ignored. If false, records with extra columns are treated + * as bad records, and if there are too many bad records, an invalid error is returned in the job + * result. The default value is false. The value of {@link #formatOptions()} determines what + * BigQuery treats as an extra value. + * + * @see + * Ignore Unknown Values + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns the maximum number of bad records that BigQuery can ignore when reading data. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + */ + public Integer maxBadRecords() { + return maxBadRecords; + } + + /** + * Returns the schema for the external data. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can + * contain one '*' wildcard character that must come after the bucket's name. Size limits + * related to load jobs apply to external data sources, plus an additional limit of 10 GB + * maximum size across all URIs. + * + * @see Quota + */ + public List sourceUris() { + return sourceUris; + } + + /** + * Returns the source format, and possibly some parsing options, of the external data. 
Supported + * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. + */ + @SuppressWarnings("unchecked") + public F formatOptions() { + return (F) formatOptions; + } + + /** + * Returns a builder for the {@code ExternalDataConfiguration} object. + */ + public Builder toBuilder() { + return new Builder() + .compression(compression) + .ignoreUnknownValues(ignoreUnknownValues) + .maxBadRecords(maxBadRecords) + .schema(schema) + .formatOptions(formatOptions) + .sourceUris(sourceUris); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("sourceUris", sourceUris) + .add("formatOptions", formatOptions) + .add("schema", schema) + .add("compression", compression) + .add("ignoreUnknownValues", ignoreUnknownValues) + .add("maxBadRecords", maxBadRecords) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(compression, ignoreUnknownValues, maxBadRecords, schema, formatOptions, + sourceUris); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExternalDataConfiguration + && Objects.equals(toPb(), ((ExternalDataConfiguration) obj).toPb()); + } + + com.google.api.services.bigquery.model.ExternalDataConfiguration toPb() { + com.google.api.services.bigquery.model.ExternalDataConfiguration externalConfigurationPb = + new com.google.api.services.bigquery.model.ExternalDataConfiguration(); + if (compression != null) { + externalConfigurationPb.setCompression(compression); + } + if (ignoreUnknownValues != null) { + externalConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + } + if (maxBadRecords != null) { + externalConfigurationPb.setMaxBadRecords(maxBadRecords); + } + if (schema != null) { + externalConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + externalConfigurationPb.setSourceFormat(formatOptions.type()); + } + if (sourceUris != null) { + externalConfigurationPb.setSourceUris(sourceUris); + } + if (formatOptions != null && 
FormatOptions.CSV.equals(formatOptions.type())) { + externalConfigurationPb.setCsvOptions(((CsvOptions) formatOptions).toPb()); + } + return externalConfigurationPb; + } + + /** + * Creates a builder for an ExternalDataConfiguration object. + * + * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage. + * Each URI can contain one '*' wildcard character that must come after the bucket's name. + * Size limits related to load jobs apply to external data sources, plus an additional limit + * of 10 GB maximum size across all URIs. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return a builder for an ExternalDataConfiguration object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static Builder builder(List sourceUris, Schema schema, FormatOptions format) { + return new Builder().sourceUris(sourceUris).schema(schema).formatOptions(format); + } + + /** + * Creates a builder for an ExternalDataConfiguration object. + * + * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The + * URI can contain one '*' wildcard character that must come after the bucket's name. Size + * limits related to load jobs apply to external data sources. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return a builder for an ExternalDataConfiguration object given source URI, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static Builder builder(String sourceUri, Schema schema, FormatOptions format) { + return new Builder() + .sourceUris(ImmutableList.of(sourceUri)) + .schema(schema) + .formatOptions(format); + } + + /** + * Creates an ExternalDataConfiguration object. + * + * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage. 
+ * Each URI can contain one '*' wildcard character that must come after the bucket's name. + * Size limits related to load jobs apply to external data sources, plus an additional limit + * of 10 GB maximum size across all URIs. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return an ExternalDataConfiguration object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static ExternalDataConfiguration of(List sourceUris, Schema schema, + FormatOptions format) { + return builder(sourceUris, schema, format).build(); + } + + /** + * Creates an ExternalDataConfiguration object. + * + * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The + * URI can contain one '*' wildcard character that must come after the bucket's name. Size + * limits related to load jobs apply to external data sources. + * @param schema the schema for the external data + * @param format the source format of the external data + * @return an ExternalDataConfiguration object given source URIs, schema and format + * + * @see Quota + * @see + * Source Format + */ + public static ExternalDataConfiguration of(String sourceUri, Schema schema, + FormatOptions format) { + return builder(sourceUri, schema, format).build(); + } + + static ExternalDataConfiguration fromPb( + com.google.api.services.bigquery.model.ExternalDataConfiguration externalDataConfiguration) { + Builder builder = new Builder(); + if (externalDataConfiguration.getSourceUris() != null) { + builder.sourceUris(externalDataConfiguration.getSourceUris()); + } + if (externalDataConfiguration.getSchema() != null) { + builder.schema(Schema.fromPb(externalDataConfiguration.getSchema())); + } + if (externalDataConfiguration.getSourceFormat() != null) { + builder.formatOptions(FormatOptions.of(externalDataConfiguration.getSourceFormat())); + } + if (externalDataConfiguration.getCompression() != null) 
{ + builder.compression(externalDataConfiguration.getCompression()); + } + if (externalDataConfiguration.getIgnoreUnknownValues() != null) { + builder.ignoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + builder.formatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions())); + } + if (externalDataConfiguration.getMaxBadRecords() != null) { + builder.maxBadRecords(externalDataConfiguration.getMaxBadRecords()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java new file mode 100644 index 000000000000..177f8a7db2b8 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExternalTableInfo.java @@ -0,0 +1,137 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.MoreObjects.ToStringHelper; + +/** + * Google BigQuery External Table information. BigQuery's external tables are tables whose data + * reside outside of BigQuery but can be queried as normal BigQuery tables. 
External tables are + * experimental and might be subject to change or removed. + * + * @see Federated Data Sources + * + */ +public class ExternalTableInfo extends BaseTableInfo { + + private static final long serialVersionUID = -5893406738246214865L; + + private final ExternalDataConfiguration configuration; + + public static final class Builder extends BaseTableInfo.Builder { + + private ExternalDataConfiguration configuration; + + private Builder() {} + + private Builder(ExternalTableInfo tableInfo) { + super(tableInfo); + this.configuration = tableInfo.configuration; + } + + protected Builder(Table tablePb) { + super(tablePb); + if (tablePb.getExternalDataConfiguration() != null) { + this.configuration = + ExternalDataConfiguration.fromPb(tablePb.getExternalDataConfiguration()); + } + } + + /** + * Sets the data format, location and other properties of a table stored outside of BigQuery. + * + * @see Federated Data + * Sources + */ + public Builder configuration(ExternalDataConfiguration configuration) { + this.configuration = checkNotNull(configuration); + return self(); + } + + /** + * Creates a {@code ExternalTableInfo} object. + */ + @Override + public ExternalTableInfo build() { + return new ExternalTableInfo(this); + } + } + + private ExternalTableInfo(Builder builder) { + super(builder); + this.configuration = builder.configuration; + } + + /** + * Returns the data format, location and other properties of a table stored outside of BigQuery. + * This property is experimental and might be subject to change or removed. + * + * @see Federated Data Sources + * + */ + public ExternalDataConfiguration configuration() { + return configuration; + } + + /** + * Returns a builder for the {@code ExternalTableInfo} object. 
+ */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper().add("configuration", configuration); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + tablePb.setExternalDataConfiguration(configuration.toPb()); + return tablePb; + } + + /** + * Returns a builder for a BigQuery External Table. + * + * @param tableId table id + * @param configuration data format, location and other properties of an External Table + */ + public static Builder builder(TableId tableId, ExternalDataConfiguration configuration) { + return new Builder().tableId(tableId).type(Type.EXTERNAL).configuration(configuration); + } + + /** + * Returns a BigQuery External Table. + * + * @param table table id + * @param configuration data format, location and other properties of an External Table + */ + public static ExternalTableInfo of(TableId table, ExternalDataConfiguration configuration) { + return builder(table, configuration).build(); + } + + @SuppressWarnings("unchecked") + static ExternalTableInfo fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobInfo.java new file mode 100644 index 000000000000..268672b04d68 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ExtractJobInfo.java @@ -0,0 +1,286 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.JobConfigurationExtract; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Extract Jobs. An Extract Job exports a BigQuery table to Google Cloud Storage. + * The extract destination provided as URIs that point to objects in Google Cloud Storage. 
+ */ +public class ExtractJobInfo extends JobInfo { + + private static final long serialVersionUID = -9126951217071361576L; + + private final TableId sourceTable; + private final List destinationUris; + private final Boolean printHeader; + private final String fieldDelimiter; + private final String format; + private final String compression; + + public static final class Builder extends JobInfo.Builder { + + private TableId sourceTable; + private List destinationUris; + private Boolean printHeader; + private String fieldDelimiter; + private String format; + private String compression; + + private Builder() {} + + private Builder(ExtractJobInfo jobInfo) { + super(jobInfo); + this.sourceTable = jobInfo.sourceTable; + this.destinationUris = jobInfo.destinationUris; + this.printHeader = jobInfo.printHeader; + this.fieldDelimiter = jobInfo.fieldDelimiter; + this.format = jobInfo.format; + this.compression = jobInfo.compression; + } + + private Builder(Job jobPb) { + super(jobPb); + JobConfigurationExtract extractConfigurationPb = jobPb.getConfiguration().getExtract(); + this.sourceTable = TableId.fromPb(extractConfigurationPb.getSourceTable()); + this.destinationUris = extractConfigurationPb.getDestinationUris(); + this.printHeader = extractConfigurationPb.getPrintHeader(); + this.fieldDelimiter = extractConfigurationPb.getFieldDelimiter(); + this.format = extractConfigurationPb.getDestinationFormat(); + this.compression = extractConfigurationPb.getCompression(); + } + + /** + * Sets the table to export. + */ + public Builder sourceTable(TableId sourceTable) { + this.sourceTable = sourceTable; + return self(); + } + + /** + * Sets the list of fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) where the + * extracted table should be written. + */ + public Builder destinationUris(List destinationUris) { + this.destinationUris = destinationUris != null ? 
ImmutableList.copyOf(destinationUris) : null; + return self(); + } + + /** + * Sets whether to print out a header row in the results. By default an header is printed. + */ + public Builder printHeader(Boolean printHeader) { + this.printHeader = printHeader; + return self(); + } + + /** + * Sets the delimiter to use between fields in the exported data. By default "," is used. + */ + public Builder fieldDelimiter(String fieldDelimiter) { + this.fieldDelimiter = fieldDelimiter; + return self(); + } + + /** + * Sets the exported file format. If not set table is exported in CSV format. + * + * + * Destination Format + */ + public Builder format(String format) { + this.format = format; + return self(); + } + + /** + * Sets the compression value to use for exported files. If not set exported files are not + * compressed. + * + * + * Compression + */ + public Builder compression(String compression) { + this.compression = compression; + return self(); + } + + @Override + public ExtractJobInfo build() { + return new ExtractJobInfo(this); + } + } + + private ExtractJobInfo(Builder builder) { + super(builder); + this.sourceTable = checkNotNull(builder.sourceTable); + this.destinationUris = checkNotNull(builder.destinationUris); + this.printHeader = builder.printHeader; + this.fieldDelimiter = builder.fieldDelimiter; + this.format = builder.format; + this.compression = builder.compression; + } + + /** + * Returns the table to export. + */ + public TableId sourceTable() { + return sourceTable; + } + + /** + * Returns the list of fully-qualified Google Cloud Storage URIs where the extracted table should + * be written. + * + * @see + * Exporting Data Into One or More Files + */ + public List destinationUris() { + return destinationUris; + } + + /** + * Returns whether an header row is printed with the result. + */ + public Boolean printHeader() { + return printHeader; + } + + /** + * Returns the delimiter used between fields in the exported data. 
+ */ + public String fieldDelimiter() { + return fieldDelimiter; + } + + /** + * Returns the exported files format. + */ + public String format() { + return format; + } + + /** + * Returns the compression value of exported files. + */ + public String compression() { + return compression; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("sourceTable", sourceTable) + .add("destinationUris", destinationUris) + .add("format", format) + .add("printHeader", printHeader) + .add("fieldDelimiter", fieldDelimiter) + .add("compression", compression); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof ExtractJobInfo && Objects.equals(toPb(), ((ExtractJobInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), sourceTable, destinationUris, printHeader, fieldDelimiter, + format, compression); + } + + @Override + Job toPb() { + JobConfigurationExtract extractConfigurationPb = new JobConfigurationExtract(); + extractConfigurationPb.setDestinationUris(destinationUris); + extractConfigurationPb.setSourceTable(sourceTable.toPb()); + extractConfigurationPb.setPrintHeader(printHeader); + extractConfigurationPb.setFieldDelimiter(fieldDelimiter); + extractConfigurationPb.setDestinationFormat(format); + extractConfigurationPb.setCompression(compression); + return super.toPb().setConfiguration(new JobConfiguration().setExtract(extractConfigurationPb)); + } + + /** + * Creates a builder for a BigQuery Extract Job given source table and destination URI. + */ + public static Builder builder(TableId sourceTable, String destinationUri) { + return builder(sourceTable, ImmutableList.of(checkNotNull(destinationUri))); + } + + /** + * Creates a builder for a BigQuery Extract Job given source table and destination URIs. 
+ */ + public static Builder builder(TableId sourceTable, List destinationUris) { + return new Builder().sourceTable(sourceTable).destinationUris(destinationUris); + } + + /** + * Returns a BigQuery Extract Job for the given source table and destination URI. Job's id is + * chosen by the service. + */ + public static ExtractJobInfo of(TableId sourceTable, String destinationUri) { + return builder(sourceTable, destinationUri).build(); + } + + /** + * Returns a BigQuery Extract Job for the given source table and destination URIs. Job's id is + * chosen by the service. + */ + public static ExtractJobInfo of(TableId sourceTable, List destinationUris) { + return builder(sourceTable, destinationUris).build(); + } + + /** + * Returns a BigQuery Extract Job for the given source table and destination URI. Job's id is set + * to the provided value. + */ + public static ExtractJobInfo of(JobId jobId, TableId sourceTable, String destinationUri) { + return builder(sourceTable, destinationUri).jobId(jobId).build(); + } + + /** + * Returns a BigQuery Extract Job for the given source table and destination URIs. Job's id is set + * to the provided value. + */ + public static ExtractJobInfo of(JobId jobId, TableId sourceTable, List destinationUris) { + return builder(sourceTable, destinationUris).jobId(jobId).build(); + } + + @SuppressWarnings("unchecked") + static ExtractJobInfo fromPb(Job jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java new file mode 100644 index 000000000000..55fae44c5eed --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Field.java @@ -0,0 +1,375 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.client.util.Data; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Table field. A table field has a name, a value, a mode and possibly a + * description. Supported types are: {@link Type#integer()}, {@link Type#bool()}, + * {@link Type#string()}, {@link Type#floatingPoint()}, {@link Type#timestamp()} and + * {@link Type#record(Field...)}. One or more fields form a table's schema. + */ +public class Field implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public Field apply(TableFieldSchema pb) { + return Field.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public TableFieldSchema apply(Field field) { + return field.toPb(); + } + }; + + private static final long serialVersionUID = -8154262932305199256L; + + /** + * Data Types for a BigQuery Table field. This class provides factory methods for all BigQuery + * field types. To instantiate a RECORD value the list of sub-fields must be provided. 
+ * + * @see + * Data Types + */ + public static class Type implements Serializable { + + private static final long serialVersionUID = 2841484762609576959L; + + public enum Value { + STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD + } + + private final Value value; + private final List fields; + + private Type(Value value) { + this.value = checkNotNull(value); + this.fields = null; + } + + private Type(Value value, List fields) { + checkArgument(fields.size() > 0, "Record must have at least one field"); + this.value = value; + this.fields = fields; + } + + /** + * Returns the value identifier. + * + * @see + * Data Types + */ + public Value value() { + return value; + } + + /** + * Returns the list of sub-fields if {@link #value()} is set to {@link Value#RECORD}. Returns + * {@code null} otherwise. + */ + public List fields() { + return fields; + } + + /** + * Returns a {@link Value#STRING} field value. + */ + public static Type string() { + return new Type(Value.STRING); + } + + /** + * Returns an {@link Value#INTEGER} field value. + */ + public static Type integer() { + return new Type(Value.INTEGER); + } + + /** + * Returns a {@link Value#FLOAT} field value. + */ + public static Type floatingPoint() { + return new Type(Value.FLOAT); + } + + /** + * Returns a {@link Value#BOOLEAN} field value. + */ + public static Type bool() { + return new Type(Value.BOOLEAN); + } + + /** + * Returns a {@link Value#TIMESTAMP} field value. + */ + public static Type timestamp() { + return new Type(Value.TIMESTAMP); + } + + /** + * Returns a {@link Value#RECORD} field value with associated list of sub-fields. + */ + public static Type record(Field... fields) { + return new Type(Value.RECORD, ImmutableList.copyOf(fields)); + } + + /** + * Returns a {@link Value#RECORD} field value with associated list of sub-fields. 
+ */ + public static Type record(List fields) { + return new Type(Value.RECORD, ImmutableList.copyOf(checkNotNull(fields))); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("value", value) + .add("fields", fields) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(value, fields); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Type)) { + return false; + } + Type other = (Type) obj; + return Objects.equals(value, other.value) + && Objects.equals(fields, other.fields); + } + } + + /** + * Mode for a BigQuery Table field. {@link Mode#NULLABLE} fields can be set to {@code null}, + * {@link Mode#REQUIRED} fields must be provided. {@link Mode#REPEATED} fields can contain more + * than one value. + */ + public enum Mode { + NULLABLE, REQUIRED, REPEATED + } + + private final String name; + private final Type type; + private final String mode; + private final String description; + + public static final class Builder { + + private String name; + private Type type; + private String mode; + private String description; + + private Builder() {} + + private Builder(Field field) { + this.name = field.name; + this.type = field.type; + this.mode = field.mode; + this.description = field.description; + } + + /** + * Sets the field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or + * underscores (_), and must start with a letter or underscore. The maximum length is 128 + * characters. + */ + public Builder name(String name) { + this.name = checkNotNull(name); + return this; + } + + /** + * Sets the value of the field. + * + * @see + * Data Types + */ + public Builder type(Type type) { + this.type = checkNotNull(type); + return this; + } + + /** + * Sets the mode of the field. When not specified {@link Mode#NULLABLE} is used. + */ + public Builder mode(Mode mode) { + this.mode = mode != null ? 
mode.name() : Data.nullOf(String.class); + return this; + } + + /** + * Sets the field description. The maximum length is 16K characters. + */ + public Builder description(String description) { + this.description = firstNonNull(description, Data.nullOf(String.class)); + return this; + } + + /** + * Creates a {@code Field} object. + */ + public Field build() { + return new Field(this); + } + } + + private Field(Builder builder) { + this.name = checkNotNull(builder.name); + this.type = checkNotNull(builder.type); + this.mode = builder.mode; + this.description = builder.description; + } + + /** + * Returns the field name. + */ + public String name() { + return name; + } + + /** + * Returns the field value. + * + * @see + * Data Types + */ + public Type type() { + return type; + } + + /** + * Returns the field mode. By default {@link Mode#NULLABLE} is used. + */ + public Mode mode() { + return mode != null ? Mode.valueOf(mode) : null; + } + + /** + * Returns the field description. + */ + public String description() { + return Data.isNull(description) ? null : description; + } + + /** + * Returns the list of sub-fields if {@link #type()} is a {@link Type.Value#RECORD}. Returns + * {@code null} otherwise. + */ + public List fields() { + return type.fields(); + } + + /** + * Returns a builder for the {@code Field} object. 
+ */ + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", name) + .add("value", type) + .add("mode", mode) + .add("description", description) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(name, type, mode, description); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Field && Objects.equals(toPb(), ((Field) obj).toPb()); + } + + TableFieldSchema toPb() { + TableFieldSchema fieldSchemaPb = new TableFieldSchema(); + fieldSchemaPb.setName(name); + fieldSchemaPb.setType(type.value().name()); + if (mode != null) { + fieldSchemaPb.setMode(mode); + } + if (description != null) { + fieldSchemaPb.setDescription(description); + } + if (fields() != null) { + List fieldsPb = Lists.transform(fields(), TO_PB_FUNCTION); + fieldSchemaPb.setFields(fieldsPb); + } + return fieldSchemaPb; + } + + /** + * Returns a Field object with given name and value. + */ + public static Field of(String name, Type type) { + return builder(name, type).build(); + } + + /** + * Returns a builder for a Field object with given name and value. 
+ */ + public static Builder builder(String name, Type type) { + return new Builder().name(name).type(type); + } + + static Field fromPb(TableFieldSchema fieldSchemaPb) { + Builder fieldBuilder = new Builder(); + fieldBuilder.name(fieldSchemaPb.getName()); + Type.Value enumValue = Type.Value.valueOf(fieldSchemaPb.getType()); + if (fieldSchemaPb.getMode() != null) { + fieldBuilder.mode(Mode.valueOf(fieldSchemaPb.getMode())); + } + if (fieldSchemaPb.getDescription() != null) { + fieldBuilder.description(fieldSchemaPb.getDescription()); + } + if (fieldSchemaPb.getFields() != null) { + fieldBuilder.type(Type.record(Lists.transform(fieldSchemaPb.getFields(), FROM_PB_FUNCTION))); + } else { + fieldBuilder.type(new Type(enumValue)); + } + return fieldBuilder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java new file mode 100644 index 000000000000..24c4b28b7613 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FieldValue.java @@ -0,0 +1,266 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; +import static com.google.common.base.Preconditions.checkState; + +import com.google.api.client.util.Data; +import com.google.api.client.util.Lists; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google BigQuery Table Field Value class. Objects of this class represent values of a BigQuery + * Table Field. A list of values forms a table row. Tables rows can be gotten as the result of a + * query or when listing table data. + */ +public class FieldValue implements Serializable { + + static final Function FROM_PB_FUNCTION = new Function() { + @Override + public FieldValue apply(Object pb) { + return FieldValue.fromPb(pb); + } + }; + private static final int MICROSECONDS = 1000000; + private static final long serialVersionUID = 469098630191710061L; + + private final Attribute attribute; + private final Object value; + + /** + * The field value's attribute, giving information on the field's content type. + */ + public enum Attribute { + /** + * A primitive field value. A {@code FieldValue} is primitive when the corresponding field has + * type {@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()} or the value is set to {@code null}. + */ + PRIMITIVE, + + /** + * A {@code FieldValue} for a field with {@link Field.Mode#REPEATED} mode. + */ + REPEATED, + + /** + * A {@code FieldValue} for a field of type {@link Field.Type#record(Field...)}. + */ + RECORD + } + + FieldValue(Attribute attribute, Object value) { + this.attribute = attribute; + this.value = value; + } + + /** + * Returns the attribute of this Field Value. 
+ * + * @return {@link Attribute#PRIMITIVE} if the field is a primitive type + * ({@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()}) or is {@code null}. Returns {@link Attribute#REPEATED} if + * the corresponding field has ({@link Field.Mode#REPEATED}) mode. Returns + * {@link Attribute#RECORD} if the corresponding field is a + * {@link Field.Type#record(Field...)} type. + */ + public Attribute attribute() { + return attribute; + } + + /** + * Returns {@code true} if this field's value is {@code null}, {@code false} otherwise. + */ + public boolean isNull() { + return value == null; + } + + /** + * Returns this field's value as an {@link Object}. If {@link #isNull()} is {@code true} this + * method returns {@code null}. + */ + public Object value() { + return value; + } + + /** + * Returns this field's value as a {@link String}. This method should only be used if the + * corresponding field has primitive type ({@link Field.Type#bool()}, {@link Field.Type#string()}, + * {@link Field.Type#floatingPoint()}, {@link Field.Type#integer()}, + * {@link Field.Type#timestamp()}). + * + * @throws ClassCastException if the field is not a primitive type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public String stringValue() { + checkNotNull(value); + return (String) value; + } + + /** + * Returns this field's value as a {@code long}. This method should only be used if the + * corresponding field has {@link Field.Type#integer()} type. 
+ * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Integer} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public long longValue() { + return Long.parseLong(stringValue()); + } + + /** + * Returns this field's value as a {@link Double}. This method should only be used if the + * corresponding field has {@link Field.Type#floatingPoint()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Double} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public double doubleValue() { + return Double.parseDouble(stringValue()); + } + + /** + * Returns this field's value as a {@link Boolean}. This method should only be used if the + * corresponding field has {@link Field.Type#bool()} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws IllegalStateException if the field's value could not be converted to {@link Boolean} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public boolean booleanValue() { + String stringValue = stringValue(); + checkState(stringValue.equalsIgnoreCase("true") || stringValue.equalsIgnoreCase("false"), + "Field value is not of boolean type"); + return Boolean.parseBoolean(stringValue); + } + + /** + * Returns this field's value as a {@code long}, representing a timestamp in microseconds since + * epoch (UNIX time). This method should only be used if the corresponding field has + * {@link Field.Type#timestamp()} type. 
+ * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Long} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public long timestampValue() { + // timestamps are encoded in the format 1408452095.22 where the integer part is seconds since + // epoch (e.g. 1408452095.22 == 2014-08-19 07:41:35.220 -05:00) + return new Double(((Double.valueOf(stringValue())) * MICROSECONDS)).longValue(); + } + + /** + * Returns this field's value as a list of {@link FieldValue}. This method should only be used if + * the corresponding field has {@link Field.Mode#REPEATED} mode (i.e. {@link #attribute()} is + * {@link Attribute#REPEATED}). + * + * @throws ClassCastException if the field has not {@link Field.Mode#REPEATED} mode + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public List repeatedValue() { + checkNotNull(value); + return (List) value; + } + + /** + * Returns this field's value as a list of {@link FieldValue}. This method should only be used if + * the corresponding field has {@link Field.Type#record(Field...)} type (i.e. {@link #attribute()} + * is {@link Attribute#RECORD}). 
+ * + * @throws ClassCastException if the field is not a {@link Field.Type#record(Field...)} type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public List recordValue() { + checkNotNull(value); + return (List) value; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("attribute", attribute) + .add("value", value) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(attribute, value); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof FieldValue)) { + return false; + } + FieldValue other = (FieldValue) obj; + return attribute == other.attribute && Objects.equals(value, other.value); + } + + @SuppressWarnings("unchecked") + static FieldValue fromPb(Object cellPb) { + if (Data.isNull(cellPb)) { + return new FieldValue(Attribute.PRIMITIVE, null); + } + if (cellPb instanceof String) { + return new FieldValue(Attribute.PRIMITIVE, cellPb); + } + if (cellPb instanceof List) { + List cellsListPb = (List) cellPb; + List repeatedCells = Lists.newArrayListWithCapacity(cellsListPb.size()); + for (Object repeatedCellPb : cellsListPb) { + repeatedCells.add(FieldValue.fromPb(repeatedCellPb)); + } + return new FieldValue(Attribute.REPEATED, repeatedCells); + } + if (cellPb instanceof Map) { + Map cellMapPb = (Map) cellPb; + if (cellMapPb.containsKey("f")) { + List cellsListPb = (List) cellMapPb.get("f"); + List recordCells = Lists.newArrayListWithCapacity(cellsListPb.size()); + for (Object repeatedCellPb : cellsListPb) { + recordCells.add(FieldValue.fromPb(repeatedCellPb)); + } + return new FieldValue(Attribute.RECORD, recordCells); + } + // This should never be the case when we are processing a first level table field (i.e. 
a + // row's field, not a record sub-field) + if (cellMapPb.containsKey("v")) { + return FieldValue.fromPb(cellMapPb.get("v")); + } + } + throw new AssertionError("Unexpected table cell format"); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java new file mode 100644 index 000000000000..e1f9d5aeb545 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/FormatOptions.java @@ -0,0 +1,90 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for Google BigQuery format options. These class define the format of external data + * used by BigQuery, for either federated tables or load jobs. + */ +public class FormatOptions implements Serializable { + + static final String CSV = "CSV"; + static final String JSON = "NEWLINE_DELIMITED_JSON"; + static final String DATASTORE_BACKUP = "DATASTORE_BACKUP"; + private static final long serialVersionUID = -443376052020423691L; + + private final String type; + + FormatOptions(String type) { + this.type = type; + } + + /** + * Returns the external data format, as a string. 
+ */ + public String type() { + return type; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("format", type).toString(); + } + + @Override + public int hashCode() { + return Objects.hash(type); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof FormatOptions && Objects.equals(type, ((FormatOptions) obj).type()); + } + + /** + * Default options for CSV format. + */ + public static CsvOptions csv() { + return CsvOptions.builder().build(); + } + + /** + * Default options for NEWLINE_DELIMITED_JSON format. + */ + public static FormatOptions json() { + return new FormatOptions(JSON); + } + + /** + * Default options for DATASTORE_BACKUP format. + */ + public static FormatOptions datastoreBackup() { + return new FormatOptions(DATASTORE_BACKUP); + } + + /** + * Default options for the provided format. + */ + public static FormatOptions of(String format) { + return new FormatOptions(format); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java new file mode 100644 index 000000000000..56be098b197b --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllRequest.java @@ -0,0 +1,406 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google Cloud BigQuery insert all request. This class can be used to stream data into BigQuery one + * record at a time without needing to run a load job. This approach enables querying data without + * the delay of running a load job. There are several important trade-offs to consider before + * choosing an approach. + * + * @see Streaming Data into + * BigQuery + */ +public class InsertAllRequest implements Serializable { + + private static final long serialVersionUID = 211200307773853078L; + + private final TableId table; + private final List rows; + private final Boolean skipInvalidRows; + private final Boolean ignoreUnknownValues; + + /** + * A Google Big Query row to be inserted into a table. Each {@code RowToInsert} has an associated + * id used by BigQuery to detect duplicate insertion requests on a best-effort basis. + * + *

Example usage of creating a row to insert: + *

    {@code
+   *   List repeatedFieldValue = Arrays.asList(1L, 2L);
+   *   Map recordContent = new HashMap();
+   *   recordContent.put("subfieldName1", "value");
+   *   recordContent.put("subfieldName2", repeatedFieldValue);
+   *   Map rowContent = new HashMap();
+   *   rowContent.put("fieldName1", true);
+   *   rowContent.put("fieldName2", recordContent);
+   *   RowToInsert row = new RowToInsert("rowId", rowContent);
+   * }
+ * + * @see + * Data Consistency + */ + public static class RowToInsert implements Serializable { + + private static final long serialVersionUID = 8563060538219179157L; + + private final String id; + private final Map content; + + RowToInsert(String id, Map content) { + this.id = id; + this.content = ImmutableMap.copyOf(content); + } + + /** + * Returns the id associated with the row. Returns {@code null} if not set. + */ + public String id() { + return id; + } + + /** + * Returns the actual content of the row, as a map. + */ + public Map content() { + return content; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("id", id) + .add("content", content) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(id, content); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof RowToInsert)) { + return false; + } + RowToInsert other = (RowToInsert) obj; + return Objects.equals(id, other.id) + && Objects.equals(content, other.content); + } + + /** + * Creates a row to be inserted with associated id. + * + * @param id id of the row, used to identify duplicates + * @param content the actual content of the row + */ + public static RowToInsert of(String id, Map content) { + return new RowToInsert(checkNotNull(id), checkNotNull(content)); + } + + /** + * Creates a row to be inserted without associated id. + * + * @param content the actual content of the row + */ + public static RowToInsert of(Map content) { + return new RowToInsert(null, checkNotNull(content)); + } + } + + public static final class Builder { + + private TableId table; + private List rows; + private Boolean skipInvalidRows; + private Boolean ignoreUnknownValues; + + private Builder() {} + + /** + * Sets the destination table for rows insert request. 
+ */ + public Builder table(TableId table) { + this.table = checkNotNull(table); + return this; + } + + /** + * Sets the rows to insert as a list of {@link RowToInsert} objects. + */ + public Builder rows(Iterable rows) { + this.rows = Lists.newLinkedList(checkNotNull(rows)); + return this; + } + + /** + * Adds a row to be inserted. + */ + public Builder addRow(RowToInsert rowToInsert) { + checkNotNull(rowToInsert); + if (rows == null) { + rows = Lists.newArrayList(); + } + rows.add(rowToInsert); + return this; + } + + /** + * Adds a row to be inserted with associated id. + * + *

Example usage of adding a row with associated id: + *

    {@code
+     *   InsertAllRequest.Builder builder = InsertAllRequest.builder(tableId);
+     *   List repeatedFieldValue = Arrays.asList(1L, 2L);
+     *   Map recordContent = new HashMap();
+     *   recordContent.put("subfieldName1", "value");
+     *   recordContent.put("subfieldName2", repeatedFieldValue);
+     *   Map rowContent = new HashMap();
+     *   rowContent.put("fieldName1", true);
+     *   rowContent.put("fieldName2", recordContent);
+     *   builder.addRow("rowId", rowContent);
+     * }
+ */ + public Builder addRow(String id, Map content) { + addRow(new RowToInsert(id, content)); + return this; + } + + /** + * Adds a row to be inserted without an associated id. + * + *

Example usage of adding a row without an associated id: + *

    {@code
+     *   InsertAllRequest.Builder builder = InsertAllRequest.builder(tableId);
+     *   List repeatedFieldValue = Arrays.asList(1L, 2L);
+     *   Map recordContent = new HashMap();
+     *   recordContent.put("subfieldName1", "value");
+     *   recordContent.put("subfieldName2", repeatedFieldValue);
+     *   Map rowContent = new HashMap();
+     *   rowContent.put("fieldName1", true);
+     *   rowContent.put("fieldName2", recordContent);
+     *   builder.addRow(rowContent);
+     * }
+ */ + public Builder addRow(Map content) { + addRow(new RowToInsert(null, content)); + return this; + } + + /** + * Sets whether to insert all valid rows of a request, even if invalid rows exist. If not set + * the entire insert request will fail if it contains an invalid row. + */ + public Builder skipInvalidRows(boolean skipInvalidRows) { + this.skipInvalidRows = skipInvalidRows; + return this; + } + + /** + * Sets whether to accept rows that contain values that do not match the schema. The unknown + * values are ignored. If not set, rows with unknown values are considered to be invalid. + */ + public Builder ignoreUnknownValues(boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + public InsertAllRequest build() { + return new InsertAllRequest(this); + } + } + + private InsertAllRequest(Builder builder) { + this.table = checkNotNull(builder.table); + this.rows = ImmutableList.copyOf(checkNotNull(builder.rows)); + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.skipInvalidRows = builder.skipInvalidRows; + } + + /** + * Returns the destination table for rows insert request. + */ + public TableId table() { + return table; + } + + /** + * Returns the rows to be inserted. + */ + public List rows() { + return rows; + } + + /** + * Returns whether to accept rows that contain values that do not match the schema. The unknown + * values are ignored. If not set, rows with unknown values are considered to be invalid. + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns whether to insert all valid rows of a request, even if invalid rows exist. If not set + * the entire insert request will fail if it contains an invalid row. + */ + public Boolean skipInvalidRows() { + return skipInvalidRows; + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table. 
+ */ + public static Builder builder(TableId table) { + return new Builder().table(table); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableId table, Iterable rows) { + return builder(table).rows(rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(TableId table, RowToInsert... rows) { + return builder(table, ImmutableList.copyOf(rows)); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table. + */ + public static Builder builder(String datasetId, String tableId) { + return new Builder().table(TableId.of(datasetId, tableId)); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(String datasetId, String tableId, Iterable rows) { + return builder(TableId.of(datasetId, tableId), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(String datasetId, String tableId, RowToInsert... rows) { + return builder(TableId.of(datasetId, tableId), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(BaseTableInfo tableInfo, Iterable rows) { + return builder(tableInfo.tableId(), rows); + } + + /** + * Returns a builder for an {@code InsertAllRequest} object given the destination table and the + * rows to insert. + */ + public static Builder builder(BaseTableInfo tableInfo, RowToInsert... rows) { + return builder(tableInfo.tableId(), rows); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. 
+ */ + public static InsertAllRequest of(TableId tableId, Iterable rows) { + return builder(tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(TableId tableId, RowToInsert... rows) { + return builder(tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(String datasetId, String tableId, Iterable rows) { + return builder(datasetId, tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(String datasetId, String tableId, RowToInsert... rows) { + return builder(datasetId, tableId, rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(BaseTableInfo tableInfo, Iterable rows) { + return builder(tableInfo.tableId(), rows).build(); + } + + /** + * Returns a {@code InsertAllRequest} object given the destination table and the rows to insert. + */ + public static InsertAllRequest of(BaseTableInfo tableInfo, RowToInsert... 
rows) { + return builder(tableInfo.tableId(), rows).build(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("table", table) + .add("rows", rows) + .add("ignoreUnknownValues", ignoreUnknownValues) + .add("skipInvalidRows", skipInvalidRows) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(table, rows, ignoreUnknownValues, skipInvalidRows); + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof InsertAllRequest)) { + return false; + } + InsertAllRequest other = (InsertAllRequest) obj; + return Objects.equals(table, other.table) + && Objects.equals(rows, other.rows) + && Objects.equals(ignoreUnknownValues, other.ignoreUnknownValues) + && Objects.equals(skipInvalidRows, other.skipInvalidRows); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java new file mode 100644 index 000000000000..992c5d851bbc --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/InsertAllResponse.java @@ -0,0 +1,121 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse.InsertErrors; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; + +import java.io.Serializable; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google Cloud BigQuery insert all response. Objects of this class possibly contain errors for an + * {@link InsertAllRequest}. If a row failed to be inserted, the non-empty list of errors associated + * to that row's index can be obtained with {@link InsertAllResponse#errorsFor(long)}. + * {@link InsertAllResponse#insertErrors()} can be used to return all errors caused by a + * {@link InsertAllRequest} as a map. + */ +public class InsertAllResponse implements Serializable { + + private static final long serialVersionUID = -6934152676514098452L; + + private final Map> insertErrors; + + InsertAllResponse(Map> insertErrors) { + this.insertErrors = insertErrors != null ? ImmutableMap.copyOf(insertErrors) + : ImmutableMap.>of(); + } + + /** + * Returns all insertion errors as a map whose keys are indexes of rows that failed to insert. + * Each failed row index is associated with a non-empty list of {@link BigQueryError}. + */ + public Map> insertErrors() { + return insertErrors; + } + + /** + * Returns errors for the provided row index. If no error exists returns {@code null}. + */ + public List errorsFor(long index) { + return insertErrors.get(index); + } + + /** + * Returns {@code true} if no row insertion failed, {@code false} otherwise. 
If {@code false} + * {@link #insertErrors()} returns an empty map. + */ + public boolean hasErrors() { + return !insertErrors.isEmpty(); + } + + @Override + public int hashCode() { + return Objects.hash(insertErrors); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof InsertAllResponse + && Objects.equals(insertErrors, ((InsertAllResponse) obj).insertErrors); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("insertErrors", insertErrors).toString(); + } + + TableDataInsertAllResponse toPb() { + TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse(); + if (!insertErrors.isEmpty()) { + responsePb.setInsertErrors(ImmutableList.copyOf(Iterables.transform(insertErrors.entrySet(), + new Function>, InsertErrors>() { + @Override + public InsertErrors apply(Map.Entry> entry) { + return new InsertErrors() + .setIndex(entry.getKey()) + .setErrors(Lists.transform(entry.getValue(), BigQueryError.TO_PB_FUNCTION)); + } + }))); + } + return responsePb; + } + + static InsertAllResponse fromPb(TableDataInsertAllResponse responsePb) { + Map> insertErrors = null; + if (responsePb.getInsertErrors() != null) { + List errorsPb = responsePb.getInsertErrors(); + insertErrors = Maps.newHashMapWithExpectedSize(errorsPb.size()); + for (InsertErrors errorPb : errorsPb) { + insertErrors.put(errorPb.getIndex(), Lists.transform( + errorPb.getErrors() != null ? errorPb.getErrors() : ImmutableList.of(), + BigQueryError.FROM_PB_FUNCTION)); + } + } + return new InsertAllResponse(insertErrors); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java new file mode 100644 index 000000000000..898c894f9a21 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobId.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.api.services.bigquery.model.JobReference;

import java.io.Serializable;
import java.util.Objects;

/**
 * Google BigQuery Job identity. A job is uniquely identified by its user-defined id and,
 * optionally, the id of the project it belongs to. When the project id is {@code null} the
 * service resolves it from the client's default project.
 */
public class JobId implements Serializable {

  private static final long serialVersionUID = 1225914835379688976L;

  // Both fields may be null: project is optional by design, job is null only for
  // instances built by fromPb from a partially-populated JobReference.
  private final String project;
  private final String job;

  private JobId(String project, String job) {
    this.project = project;
    this.job = job;
  }

  /**
   * Returns the project's user-defined id, or {@code null} if the job identity is not bound to a
   * specific project.
   */
  public String project() {
    return project;
  }

  /**
   * Returns the job's user-defined id.
   */
  public String job() {
    return job;
  }

  /**
   * Creates a job identity given the project's and the job's user-defined ids.
   *
   * @throws NullPointerException if either argument is {@code null}
   */
  public static JobId of(String project, String job) {
    return new JobId(checkNotNull(project), checkNotNull(job));
  }

  /**
   * Creates a job identity given only the job's user-defined id. The project id is left unset.
   *
   * @throws NullPointerException if {@code job} is {@code null}
   */
  public static JobId of(String job) {
    return new JobId(null, checkNotNull(job));
  }

  @Override
  public boolean equals(Object obj) {
    // Delegates to the protobuf representation so equality semantics match what is
    // actually sent to the service.
    return obj instanceof JobId && Objects.equals(toPb(), ((JobId) obj).toPb());
  }

  @Override
  public int hashCode() {
    return Objects.hash(project, job);
  }

  @Override
  public String toString() {
    return toPb().toString();
  }

  JobReference toPb() {
    return new JobReference().setProjectId(project).setJobId(job);
  }

  static JobId fromPb(JobReference jobRef) {
    return new JobId(jobRef.getProjectId(), jobRef.getJobId());
  }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import com.google.api.services.bigquery.model.Job;
import com.google.common.base.Function;
import com.google.common.base.MoreObjects;
import com.google.common.base.MoreObjects.ToStringHelper;

import java.io.Serializable;
import java.util.Objects;

/**
 * Base class for Google BigQuery Job information. Jobs are objects that manage asynchronous tasks
 * such as running queries, loading data, and exporting data. Use {@link CopyJobInfo} for a job that
 * copies an existing table. Use {@link ExtractJobInfo} for a job that exports a table to Google
 * Cloud Storage. Use {@link LoadJobInfo} for a job that loads data from Google Cloud Storage into
 * a table. Use {@link QueryJobInfo} for a job that runs a query.
 *
 * @param <S> the statistics type returned for this kind of job
 * @see <a href="https://cloud.google.com/bigquery/jobs">Jobs</a>
 */
public abstract class JobInfo<S extends JobStatistics> implements Serializable {

  static final Function<Job, JobInfo> FROM_PB_FUNCTION =
      new Function<Job, JobInfo>() {
        @Override
        public JobInfo apply(Job pb) {
          return JobInfo.fromPb(pb);
        }
      };
  private static final long serialVersionUID = -7086529810736715842L;

  /**
   * Specifies whether the job is allowed to create new tables.
   */
  public enum CreateDisposition {
    /**
     * Configures the job to create the table if it does not exist.
     */
    CREATE_IF_NEEDED,

    /**
     * Configures the job to fail with a not-found error if the table does not exist.
     */
    CREATE_NEVER
  }

  /**
   * Specifies the action that occurs if the destination table already exists.
   */
  public enum WriteDisposition {
    /**
     * Configures the job to overwrite the table data if table already exists.
     */
    WRITE_TRUNCATE,

    /**
     * Configures the job to append data to the table if it already exists.
     */
    WRITE_APPEND,

    /**
     * Configures the job to fail with a duplicate error if the table already exists.
     */
    WRITE_EMPTY
  }

  private final String etag;
  private final String id;
  private final JobId jobId;
  private final String selfLink;
  private final JobStatus status;
  private final S statistics;
  private final String userEmail;

  /**
   * Base builder for job information.
   *
   * @param <T> the concrete {@link JobInfo} type being built
   * @param <S> the statistics type of the job
   * @param <B> the concrete builder type, for fluent self-returning setters
   */
  public abstract static class Builder<T extends JobInfo, S extends JobStatistics,
      B extends Builder<T, S, B>> {

    private String etag;
    private String id;
    private JobId jobId;
    private String selfLink;
    private JobStatus status;
    private S statistics;
    private String userEmail;

    protected Builder() {}

    protected Builder(JobInfo<S> jobInfo) {
      this.etag = jobInfo.etag;
      this.id = jobInfo.id;
      this.jobId = jobInfo.jobId;
      this.selfLink = jobInfo.selfLink;
      this.status = jobInfo.status;
      this.statistics = jobInfo.statistics;
      this.userEmail = jobInfo.userEmail;
    }

    protected Builder(Job jobPb) {
      this.etag = jobPb.getEtag();
      this.id = jobPb.getId();
      // Nested protobuf messages are optional; only convert the ones that are present.
      if (jobPb.getJobReference() != null) {
        this.jobId = JobId.fromPb(jobPb.getJobReference());
      }
      this.selfLink = jobPb.getSelfLink();
      if (jobPb.getStatus() != null) {
        this.status = JobStatus.fromPb(jobPb.getStatus());
      }
      if (jobPb.getStatistics() != null) {
        this.statistics = JobStatistics.fromPb(jobPb.getStatistics());
      }
      this.userEmail = jobPb.getUserEmail();
    }

    // Cast is safe: B is declared as the concrete builder's own type (self-type idiom).
    @SuppressWarnings("unchecked")
    protected B self() {
      return (B) this;
    }

    B etag(String etag) {
      this.etag = etag;
      return self();
    }

    B id(String id) {
      this.id = id;
      return self();
    }

    /**
     * Sets the job identity.
     */
    public B jobId(JobId jobId) {
      this.jobId = jobId;
      return self();
    }

    B selfLink(String selfLink) {
      this.selfLink = selfLink;
      return self();
    }

    B status(JobStatus status) {
      this.status = status;
      return self();
    }

    B statistics(S statistics) {
      this.statistics = statistics;
      return self();
    }

    B userEmail(String userEmail) {
      this.userEmail = userEmail;
      return self();
    }

    public abstract T build();
  }

  protected JobInfo(Builder<?, S, ?> builder) {
    this.jobId = builder.jobId;
    this.etag = builder.etag;
    this.id = builder.id;
    this.selfLink = builder.selfLink;
    this.status = builder.status;
    this.statistics = builder.statistics;
    this.userEmail = builder.userEmail;
  }

  /**
   * Returns the hash of the job resource.
   */
  public String etag() {
    return etag;
  }

  /**
   * Returns an opaque id for the job.
   */
  public String id() {
    return id;
  }

  /**
   * Returns the job identity.
   */
  public JobId jobId() {
    return jobId;
  }

  /**
   * Returns an URL that can be used to access the resource again. The returned URL can be used for
   * GET requests.
   */
  public String selfLink() {
    return selfLink;
  }

  /**
   * Returns the status of this job. Examine this value when polling an asynchronous job to see if
   * the job is complete.
   */
  public JobStatus status() {
    return status;
  }

  /**
   * Returns information about the job, including starting time and ending time of the job.
   */
  public S statistics() {
    return statistics;
  }

  /**
   * Returns the email address of the user who ran the job.
   */
  public String userEmail() {
    return userEmail;
  }

  /**
   * Returns a builder for the job.
   */
  public abstract Builder toBuilder();

  ToStringHelper toStringHelper() {
    return MoreObjects.toStringHelper(this)
        .add("job", jobId)
        .add("status", status)
        .add("statistics", statistics)
        .add("userEmail", userEmail)
        .add("etag", etag)
        .add("id", id)
        .add("selfLink", selfLink);
  }

  @Override
  public String toString() {
    return toStringHelper().toString();
  }

  @Override
  public int hashCode() {
    // Hashing only jobId is consistent with equals(): equal protobufs imply equal job ids.
    return Objects.hash(jobId);
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof JobInfo && Objects.equals(toPb(), ((JobInfo) obj).toPb());
  }

  Job toPb() {
    Job jobPb = new Job();
    jobPb.setEtag(etag);
    jobPb.setId(id);
    jobPb.setSelfLink(selfLink);
    jobPb.setUserEmail(userEmail);
    if (jobId != null) {
      jobPb.setJobReference(jobId.toPb());
    }
    if (status != null) {
      jobPb.setStatus(status.toPb());
    }
    if (statistics != null) {
      jobPb.setStatistics(statistics.toPb());
    }
    return jobPb;
  }

  /**
   * Dispatches to the concrete {@code JobInfo} subclass matching the job's configuration.
   * Assumes {@code jobPb.getConfiguration()} is non-null with exactly one configuration set,
   * as returned by the BigQuery service.
   */
  @SuppressWarnings("unchecked")
  static <T extends JobInfo> T fromPb(Job jobPb) {
    if (jobPb.getConfiguration().getLoad() != null) {
      return (T) LoadJobInfo.fromPb(jobPb);
    } else if (jobPb.getConfiguration().getCopy() != null) {
      return (T) CopyJobInfo.fromPb(jobPb);
    } else if (jobPb.getConfiguration().getExtract() != null) {
      return (T) ExtractJobInfo.fromPb(jobPb);
    } else if (jobPb.getConfiguration().getQuery() != null) {
      return (T) QueryJobInfo.fromPb(jobPb);
    } else {
      // Never reached for well-formed service responses; guards against new job types.
      throw new IllegalArgumentException("Job configuration is not supported");
    }
  }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import com.google.api.services.bigquery.model.JobStatistics2;
import com.google.api.services.bigquery.model.JobStatistics3;
import com.google.api.services.bigquery.model.JobStatistics4;
import com.google.common.base.MoreObjects;
import com.google.common.base.MoreObjects.ToStringHelper;

import java.io.Serializable;
import java.util.List;
import java.util.Objects;

/**
 * A Google BigQuery Job statistics. Holds timing information common to all job types; the nested
 * subclasses add per-type statistics for extract, load and query jobs.
 */
public class JobStatistics implements Serializable {

  private static final long serialVersionUID = 1433024714741660399L;

  private final Long creationTime;
  private final Long endTime;
  private final Long startTime;

  /**
   * A Google BigQuery Extract Job statistics.
   */
  public static class ExtractStatistics extends JobStatistics {

    private static final long serialVersionUID = -1566598819212767373L;

    private final List<Long> destinationUriFileCounts;

    static final class Builder extends JobStatistics.Builder<ExtractStatistics, Builder> {

      private List<Long> destinationUriFileCounts;

      private Builder() {}

      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
        super(statisticsPb);
        this.destinationUriFileCounts = statisticsPb.getExtract().getDestinationUriFileCounts();
      }

      Builder destinationUriFileCounts(List<Long> destinationUriFileCounts) {
        this.destinationUriFileCounts = destinationUriFileCounts;
        return self();
      }

      @Override
      ExtractStatistics build() {
        return new ExtractStatistics(this);
      }
    }

    private ExtractStatistics(Builder builder) {
      super(builder);
      this.destinationUriFileCounts = builder.destinationUriFileCounts;
    }

    /**
     * Returns the number of files per destination URI or URI pattern specified in the extract job.
     * These values will be in the same order as the URIs specified by
     * {@link ExtractJobInfo#destinationUris()}.
     */
    public List<Long> destinationUriFileCounts() {
      return destinationUriFileCounts;
    }

    @Override
    ToStringHelper toStringHelper() {
      return super.toStringHelper().add("destinationUriFileCounts", destinationUriFileCounts);
    }

    @Override
    public boolean equals(Object obj) {
      return obj instanceof ExtractStatistics
          && Objects.equals(toPb(), ((ExtractStatistics) obj).toPb());
    }

    @Override
    public int hashCode() {
      return Objects.hash(super.hashCode(), destinationUriFileCounts);
    }

    @Override
    com.google.api.services.bigquery.model.JobStatistics toPb() {
      com.google.api.services.bigquery.model.JobStatistics statisticsPb = super.toPb();
      return statisticsPb.setExtract(
          new JobStatistics4().setDestinationUriFileCounts(destinationUriFileCounts));
    }

    static Builder builder() {
      return new Builder();
    }

    static ExtractStatistics fromPb(
        com.google.api.services.bigquery.model.JobStatistics statisticPb) {
      return new Builder(statisticPb).build();
    }
  }

  /**
   * A Google BigQuery Load Job statistics.
   */
  public static class LoadStatistics extends JobStatistics {

    private static final long serialVersionUID = -707369246536309215L;

    private final Long inputBytes;
    private final Long inputFiles;
    private final Long outputBytes;
    private final Long outputRows;

    static final class Builder extends JobStatistics.Builder<LoadStatistics, Builder> {

      private Long inputBytes;
      private Long inputFiles;
      private Long outputBytes;
      private Long outputRows;

      private Builder() {}

      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
        super(statisticsPb);
        this.inputBytes = statisticsPb.getLoad().getInputFileBytes();
        this.inputFiles = statisticsPb.getLoad().getInputFiles();
        this.outputBytes = statisticsPb.getLoad().getOutputBytes();
        this.outputRows = statisticsPb.getLoad().getOutputRows();
      }

      Builder inputBytes(Long inputBytes) {
        this.inputBytes = inputBytes;
        return self();
      }

      Builder inputFiles(Long inputFiles) {
        this.inputFiles = inputFiles;
        return self();
      }

      Builder outputBytes(Long outputBytes) {
        this.outputBytes = outputBytes;
        return self();
      }

      Builder outputRows(Long outputRows) {
        this.outputRows = outputRows;
        return self();
      }

      @Override
      LoadStatistics build() {
        return new LoadStatistics(this);
      }
    }

    private LoadStatistics(Builder builder) {
      super(builder);
      this.inputBytes = builder.inputBytes;
      this.inputFiles = builder.inputFiles;
      this.outputBytes = builder.outputBytes;
      this.outputRows = builder.outputRows;
    }

    /**
     * Returns the number of bytes of source data in a load job.
     */
    public Long inputBytes() {
      return inputBytes;
    }

    /**
     * Returns the number of source files in a load job.
     */
    public Long inputFiles() {
      return inputFiles;
    }

    /**
     * Returns the size of the data loaded by a load job so far, in bytes.
     */
    public Long outputBytes() {
      return outputBytes;
    }

    /**
     * Returns the number of rows loaded by a load job so far.
     */
    public Long outputRows() {
      return outputRows;
    }

    @Override
    ToStringHelper toStringHelper() {
      return super.toStringHelper()
          .add("inputBytes", inputBytes)
          .add("inputFiles", inputFiles)
          .add("outputBytes", outputBytes)
          .add("outputRows", outputRows);
    }

    @Override
    public boolean equals(Object obj) {
      return obj instanceof LoadStatistics && Objects.equals(toPb(), ((LoadStatistics) obj).toPb());
    }

    @Override
    public int hashCode() {
      return Objects.hash(super.hashCode(), inputBytes, inputFiles, outputBytes, outputRows);
    }

    @Override
    com.google.api.services.bigquery.model.JobStatistics toPb() {
      JobStatistics3 loadStatisticsPb = new JobStatistics3();
      loadStatisticsPb.setInputFileBytes(inputBytes);
      loadStatisticsPb.setInputFiles(inputFiles);
      loadStatisticsPb.setOutputBytes(outputBytes);
      loadStatisticsPb.setOutputRows(outputRows);
      return super.toPb().setLoad(loadStatisticsPb);
    }

    static Builder builder() {
      return new Builder();
    }

    static LoadStatistics fromPb(com.google.api.services.bigquery.model.JobStatistics statisticPb) {
      return new Builder(statisticPb).build();
    }
  }

  /**
   * A Google BigQuery Query Job statistics.
   */
  public static class QueryStatistics extends JobStatistics {

    private static final long serialVersionUID = 7539354109226732353L;

    private final Integer billingTier;
    private final Boolean cacheHit;
    private final Long totalBytesBilled;
    private final Long totalBytesProcessed;

    static final class Builder extends JobStatistics.Builder<QueryStatistics, Builder> {

      private Integer billingTier;
      private Boolean cacheHit;
      private Long totalBytesBilled;
      private Long totalBytesProcessed;

      private Builder() {}

      private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
        super(statisticsPb);
        this.billingTier = statisticsPb.getQuery().getBillingTier();
        this.cacheHit = statisticsPb.getQuery().getCacheHit();
        this.totalBytesBilled = statisticsPb.getQuery().getTotalBytesBilled();
        this.totalBytesProcessed = statisticsPb.getQuery().getTotalBytesProcessed();
      }

      Builder billingTier(Integer billingTier) {
        this.billingTier = billingTier;
        return self();
      }

      Builder cacheHit(Boolean cacheHit) {
        this.cacheHit = cacheHit;
        return self();
      }

      Builder totalBytesBilled(Long totalBytesBilled) {
        this.totalBytesBilled = totalBytesBilled;
        return self();
      }

      Builder totalBytesProcessed(Long totalBytesProcessed) {
        this.totalBytesProcessed = totalBytesProcessed;
        return self();
      }

      @Override
      QueryStatistics build() {
        return new QueryStatistics(this);
      }
    }

    private QueryStatistics(Builder builder) {
      super(builder);
      this.billingTier = builder.billingTier;
      this.cacheHit = builder.cacheHit;
      this.totalBytesBilled = builder.totalBytesBilled;
      this.totalBytesProcessed = builder.totalBytesProcessed;
    }

    /**
     * Returns the billing tier for the job.
     */
    public Integer billingTier() {
      return billingTier;
    }

    /**
     * Returns whether the query result was fetched from the query cache.
     *
     * @see <a href="https://cloud.google.com/bigquery/querying-data#querycaching">
     *     Query Caching</a>
     */
    public Boolean cacheHit() {
      return cacheHit;
    }

    /**
     * Returns the total number of bytes billed for the job.
     */
    public Long totalBytesBilled() {
      return totalBytesBilled;
    }

    /**
     * Returns the total number of bytes processed by the job.
     */
    public Long totalBytesProcessed() {
      return totalBytesProcessed;
    }

    @Override
    ToStringHelper toStringHelper() {
      return super.toStringHelper()
          .add("billingTier", billingTier)
          .add("cacheHit", cacheHit)
          .add("totalBytesBilled", totalBytesBilled)
          .add("totalBytesProcessed", totalBytesProcessed);
    }

    @Override
    public boolean equals(Object obj) {
      return obj instanceof QueryStatistics
          && Objects.equals(toPb(), ((QueryStatistics) obj).toPb());
    }

    @Override
    public int hashCode() {
      return Objects.hash(super.hashCode(), billingTier, cacheHit, totalBytesBilled,
          totalBytesProcessed);
    }

    @Override
    com.google.api.services.bigquery.model.JobStatistics toPb() {
      JobStatistics2 queryStatisticsPb = new JobStatistics2();
      queryStatisticsPb.setBillingTier(billingTier);
      queryStatisticsPb.setCacheHit(cacheHit);
      queryStatisticsPb.setTotalBytesBilled(totalBytesBilled);
      queryStatisticsPb.setTotalBytesProcessed(totalBytesProcessed);
      return super.toPb().setQuery(queryStatisticsPb);
    }

    static Builder builder() {
      return new Builder();
    }

    static QueryStatistics fromPb(
        com.google.api.services.bigquery.model.JobStatistics statisticPb) {
      return new Builder(statisticPb).build();
    }
  }

  /**
   * Base builder for job statistics.
   *
   * @param <T> the concrete statistics type being built
   * @param <B> the concrete builder type, for fluent self-returning setters
   */
  static class Builder<T extends JobStatistics, B extends Builder<T, B>> {

    private Long creationTime;
    private Long endTime;
    private Long startTime;

    protected Builder() {}

    protected Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) {
      this.creationTime = statisticsPb.getCreationTime();
      this.endTime = statisticsPb.getEndTime();
      this.startTime = statisticsPb.getStartTime();
    }

    // Cast is safe: B is declared as the concrete builder's own type (self-type idiom).
    @SuppressWarnings("unchecked")
    protected B self() {
      return (B) this;
    }

    B creationTime(Long creationTime) {
      this.creationTime = creationTime;
      return self();
    }

    B endTime(Long endTime) {
      this.endTime = endTime;
      return self();
    }

    B startTime(Long startTime) {
      this.startTime = startTime;
      return self();
    }

    // Safe only for the base builder; subclasses override to return their own type.
    @SuppressWarnings("unchecked")
    T build() {
      return (T) new JobStatistics(this);
    }
  }

  protected JobStatistics(Builder builder) {
    this.creationTime = builder.creationTime;
    this.endTime = builder.endTime;
    this.startTime = builder.startTime;
  }

  /**
   * Returns the creation time of the job in milliseconds since epoch.
   */
  public Long creationTime() {
    return creationTime;
  }

  /**
   * Returns the end time of the job in milliseconds since epoch. Returns {@code null} if the
   * job has not finished yet.
   */
  public Long endTime() {
    return endTime;
  }

  /**
   * Returns the start time of the job in milliseconds since epoch. Returns {@code null} if the
   * job has not started yet.
   */
  public Long startTime() {
    return startTime;
  }

  ToStringHelper toStringHelper() {
    return MoreObjects.toStringHelper(this)
        .add("creationTime", creationTime)
        .add("endTime", endTime)
        .add("startTime", startTime);
  }

  @Override
  public String toString() {
    return toStringHelper().toString();
  }

  @Override
  public int hashCode() {
    return Objects.hash(creationTime, endTime, startTime);
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof JobStatistics && Objects.equals(toPb(), ((JobStatistics) obj).toPb());
  }

  com.google.api.services.bigquery.model.JobStatistics toPb() {
    com.google.api.services.bigquery.model.JobStatistics statistics =
        new com.google.api.services.bigquery.model.JobStatistics();
    statistics.setCreationTime(creationTime);
    statistics.setEndTime(endTime);
    statistics.setStartTime(startTime);
    return statistics;
  }

  static Builder builder() {
    return new Builder();
  }

  /**
   * Dispatches to the statistics subclass matching whichever per-type section is populated;
   * falls back to the plain timing-only statistics when none is set.
   */
  @SuppressWarnings("unchecked")
  static <T extends JobStatistics> T fromPb(
      com.google.api.services.bigquery.model.JobStatistics statisticPb) {
    if (statisticPb.getLoad() != null) {
      return (T) LoadStatistics.fromPb(statisticPb);
    } else if (statisticPb.getExtract() != null) {
      return (T) ExtractStatistics.fromPb(statisticPb);
    } else if (statisticPb.getQuery() != null) {
      return (T) QueryStatistics.fromPb(statisticPb);
    } else {
      return (T) new Builder(statisticPb).build();
    }
  }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

import java.io.Serializable;
import java.util.List;
import java.util.Objects;

/**
 * A Google BigQuery Job status. Objects of this class can be examined when polling an asynchronous
 * job to see if the job completed.
 */
public class JobStatus implements Serializable {

  private static final long serialVersionUID = -714976456815445365L;

  /**
   * Possible states that a BigQuery Job can assume.
   */
  public enum State {
    /**
     * The BigQuery Job is waiting to be executed.
     */
    PENDING,

    /**
     * The BigQuery Job is being executed.
     */
    RUNNING,

    /**
     * The BigQuery Job has completed either succeeding or failing. If failed {@link #error()} will
     * be non-null.
     */
    DONE
  }

  private final State state;
  private final BigQueryError error;
  private final List<BigQueryError> executionErrors;

  JobStatus(State state) {
    this(state, null, null);
  }

  JobStatus(State state, BigQueryError error, List<BigQueryError> executionErrors) {
    this.state = state;
    this.error = error;
    // Defensive copy keeps this value object immutable.
    this.executionErrors = executionErrors != null ? ImmutableList.copyOf(executionErrors) : null;
  }

  /**
   * Returns the state of the job. A {@link State#PENDING} job is waiting to be executed. A
   * {@link State#RUNNING} is being executed. A {@link State#DONE} job has completed either
   * succeeding or failing. If failed {@link #error()} will be non-null.
   */
  public State state() {
    return state;
  }

  /**
   * Returns the final error result of the job. If present, indicates that the job has completed
   * and was unsuccessful.
   *
   * @see <a href="https://cloud.google.com/bigquery/troubleshooting-errors">
   *     Troubleshooting Errors</a>
   */
  public BigQueryError error() {
    return error;
  }

  /**
   * Returns all errors encountered during the running of the job. Errors here do not necessarily
   * mean that the job has completed or was unsuccessful.
   *
   * @see <a href="https://cloud.google.com/bigquery/troubleshooting-errors">
   *     Troubleshooting Errors</a>
   */
  public List<BigQueryError> executionErrors() {
    return executionErrors;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this)
        .add("state", state)
        .add("error", error)
        .add("executionErrors", executionErrors)
        .toString();
  }

  @Override
  public int hashCode() {
    return Objects.hash(state, error, executionErrors);
  }

  @Override
  public boolean equals(Object obj) {
    return obj instanceof JobStatus && Objects.equals(toPb(), ((JobStatus) obj).toPb());
  }

  com.google.api.services.bigquery.model.JobStatus toPb() {
    com.google.api.services.bigquery.model.JobStatus statusPb =
        new com.google.api.services.bigquery.model.JobStatus();
    if (state != null) {
      statusPb.setState(state.toString());
    }
    if (error != null) {
      statusPb.setErrorResult(error.toPb());
    }
    if (executionErrors != null) {
      statusPb.setErrors(Lists.transform(executionErrors, BigQueryError.TO_PB_FUNCTION));
    }
    return statusPb;
  }

  static JobStatus fromPb(com.google.api.services.bigquery.model.JobStatus statusPb) {
    List<BigQueryError> allErrors = null;
    if (statusPb.getErrors() != null) {
      allErrors = Lists.transform(statusPb.getErrors(), BigQueryError.FROM_PB_FUNCTION);
    }
    BigQueryError error =
        statusPb.getErrorResult() != null ? BigQueryError.fromPb(statusPb.getErrorResult()) : null;
    // State.valueOf(null) would throw NPE; the service can omit state on some responses,
    // so map a missing state to null instead of failing.
    State state = statusPb.getState() != null ? State.valueOf(statusPb.getState()) : null;
    return new JobStatus(state, error, allErrors);
  }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobStatistics.LoadStatistics; + +import java.util.List; +import java.util.Objects; + +/** + * Google BigQuery Load Job. A Load Job loads data from one of several formats into a table. Data is + * provided as URIs that point to objects in Google Cloud Storage. 
+ */ +public class LoadJobInfo extends JobInfo { + + private static final long serialVersionUID = 2515503817007974115L; + + private final List sourceUris; + private final TableId destinationTable; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + private final FormatOptions formatOptions; + private final Integer maxBadRecords; + private final Schema schema; + private final Boolean ignoreUnknownValues; + private final List projectionFields; + + public static final class Builder extends JobInfo.Builder { + + private List sourceUris; + private TableId destinationTable; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + private FormatOptions formatOptions; + private Integer maxBadRecords; + private Schema schema; + private Boolean ignoreUnknownValues; + private List projectionFields; + + private Builder() {} + + private Builder(LoadJobInfo jobInfo) { + super(jobInfo); + this.sourceUris = jobInfo.sourceUris; + this.destinationTable = jobInfo.destinationTable; + this.createDisposition = jobInfo.createDisposition; + this.writeDisposition = jobInfo.writeDisposition; + this.formatOptions = jobInfo.formatOptions; + this.maxBadRecords = jobInfo.maxBadRecords; + this.schema = jobInfo.schema; + this.ignoreUnknownValues = jobInfo.ignoreUnknownValues; + this.projectionFields = jobInfo.projectionFields; + } + + private Builder(Job jobPb) { + super(jobPb); + JobConfigurationLoad loadConfigurationPb = jobPb.getConfiguration().getLoad(); + this.sourceUris = loadConfigurationPb.getSourceUris(); + this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); + if (loadConfigurationPb.getCreateDisposition() != null) { + this.createDisposition = + CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); + } + if (loadConfigurationPb.getWriteDisposition() != null) { + this.writeDisposition = 
WriteDisposition.valueOf(loadConfigurationPb.getWriteDisposition()); + } + if (loadConfigurationPb.getSourceFormat() != null) { + this.formatOptions = FormatOptions.of(loadConfigurationPb.getSourceFormat()); + } + if (loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getAllowQuotedNewlines() != null + || loadConfigurationPb.getEncoding() != null + || loadConfigurationPb.getFieldDelimiter() != null + || loadConfigurationPb.getQuote() != null + || loadConfigurationPb.getSkipLeadingRows() != null) { + CsvOptions.Builder builder = CsvOptions.builder() + .allowJaggedRows(loadConfigurationPb.getAllowJaggedRows()) + .allowQuotedNewLines(loadConfigurationPb.getAllowQuotedNewlines()) + .encoding(loadConfigurationPb.getEncoding()) + .fieldDelimiter(loadConfigurationPb.getFieldDelimiter()) + .quote(loadConfigurationPb.getQuote()) + .skipLeadingRows(loadConfigurationPb.getSkipLeadingRows()); + this.formatOptions = builder.build(); + } + this.maxBadRecords = loadConfigurationPb.getMaxBadRecords(); + if (loadConfigurationPb.getSchema() != null) { + this.schema = Schema.fromPb(loadConfigurationPb.getSchema()); + } + this.ignoreUnknownValues = loadConfigurationPb.getIgnoreUnknownValues(); + this.projectionFields = loadConfigurationPb.getProjectionFields(); + } + + /** + * Sets the fully-qualified URIs that point to source data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character and it must come after the + * 'bucket' name. + */ + public Builder sourceUris(List sourceUris) { + this.sourceUris = sourceUris != null ? ImmutableList.copyOf(sourceUris) : null; + return this; + } + + /** + * Sets the destination table to load the data into. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + /** + * Sets whether the job is allowed to create new tables. 
+ * + * @see + * Jobs: Load Configuration + */ + public Builder createDisposition(CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Jobs: Load Configuration + */ + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + /** + * Sets the source format, and possibly some parsing options, of the external data. Supported + * formats are {@code CSV}, {@code NEWLINE_DELIMITED_JSON} and {@code DATASTORE_BACKUP}. If not + * specified, {@code CSV} format is assumed. + * + * + * Source Format + */ + public Builder formatOptions(FormatOptions formatOptions) { + this.formatOptions = formatOptions; + return this; + } + + /** + * Sets the maximum number of bad records that BigQuery can ignore when running the job. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * By default no bad record is ignored. + */ + public Builder maxBadRecords(Integer maxBadRecords) { + this.maxBadRecords = maxBadRecords; + return this; + } + + /** + * Sets the schema for the destination table. The schema can be omitted if the destination table + * already exists, or if you're loading data from Google Cloud Datastore. + */ + public Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + /** + * Sets whether BigQuery should allow extra values that are not represented in the table schema. + * If {@code true}, the extra values are ignored. If {@code true}, records with extra columns + * are treated as bad records, and if there are too many bad records, an invalid error is + * returned in the job result. By default unknown values are not allowed. 
+ */ + public Builder ignoreUnknownValues(Boolean ignoreUnknownValues) { + this.ignoreUnknownValues = ignoreUnknownValues; + return this; + } + + /** + * Sets which entity properties to load into BigQuery from a Cloud Datastore backup. This field + * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case + * sensitive and must be top-level properties. If no properties are specified, BigQuery loads + * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid + * error is returned in the job result. + */ + public Builder projectionFields(List projectionFields) { + this.projectionFields = + projectionFields != null ? ImmutableList.copyOf(projectionFields) : null; + return this; + } + + @Override + public LoadJobInfo build() { + return new LoadJobInfo(this); + } + } + + private LoadJobInfo(Builder builder) { + super(builder); + this.sourceUris = builder.sourceUris; + this.destinationTable = checkNotNull(builder.destinationTable); + this.createDisposition = builder.createDisposition; + this.writeDisposition = builder.writeDisposition; + this.formatOptions = builder.formatOptions; + this.maxBadRecords = builder.maxBadRecords; + this.schema = builder.schema; + this.ignoreUnknownValues = builder.ignoreUnknownValues; + this.projectionFields = builder.projectionFields; + } + + /** + * Returns the fully-qualified URIs that point to source data in Google Cloud Storage (e.g. + * gs://bucket/path). Each URI can contain one '*' wildcard character and it must come after the + * 'bucket' name. + */ + public List sourceUris() { + return sourceUris; + } + + /** + * Returns the destination table to load the data into. + */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether the job is allowed to create new tables. 
+ * + * @see + * Jobs: Load Configuration + */ + public CreateDisposition createDisposition() { + return this.createDisposition; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Jobs: Load Configuration + */ + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + /** + * Returns additional properties used to parse CSV data (used when {@link #format()} is set + * to CSV). Returns {@code null} if not set. + */ + public CsvOptions csvOptions() { + return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null; + } + + /** + * Returns the maximum number of bad records that BigQuery can ignore when running the job. If the + * number of bad records exceeds this value, an invalid error is returned in the job result. + * By default no bad record is ignored. + */ + public Integer maxBadRecords() { + return maxBadRecords; + } + + /** + * Returns the schema for the destination table, if set. Returns {@code null} otherwise. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the format of the data files. + */ + public String format() { + return formatOptions != null ? formatOptions.type() : null; + } + + /** + * Returns whether BigQuery should allow extra values that are not represented in the table + * schema. If {@code true}, the extra values are ignored. If {@code true}, records with extra + * columns are treated as bad records, and if there are too many bad records, an invalid error is + * returned in the job result. By default unknown values are not allowed. + */ + public Boolean ignoreUnknownValues() { + return ignoreUnknownValues; + } + + /** + * Returns which entity properties to load into BigQuery from a Cloud Datastore backup. This field + * is only used if the source format is set to {@code DATASTORE_BACKUP}. Property names are case + * sensitive and must be top-level properties. 
If no properties are specified, BigQuery loads + * all properties. If any named property isn't found in the Cloud Datastore backup, an invalid + * error is returned in the job result. + */ + public List projectionFields() { + return projectionFields; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("destinationTable", destinationTable) + .add("sourceUris", sourceUris) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("formatOptions", formatOptions) + .add("maxBadRecords", maxBadRecords) + .add("schema", schema) + .add("ignoreUnknownValue", ignoreUnknownValues) + .add("projectionFields", projectionFields); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof LoadJobInfo && Objects.equals(toPb(), ((LoadJobInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), sourceUris, destinationTable, createDisposition, + writeDisposition, formatOptions, maxBadRecords, schema, ignoreUnknownValues, + projectionFields); + } + + @Override + Job toPb() { + JobConfigurationLoad loadConfigurationPb = new JobConfigurationLoad(); + loadConfigurationPb.setSourceUris(sourceUris); + loadConfigurationPb.setDestinationTable(destinationTable.toPb()); + if (createDisposition != null) { + loadConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (writeDisposition != null) { + loadConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + if (csvOptions() != null) { + CsvOptions csvOptions = csvOptions(); + loadConfigurationPb.setFieldDelimiter(csvOptions.fieldDelimiter()) + .setAllowJaggedRows(csvOptions.allowJaggedRows()) + .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) + .setEncoding(csvOptions.encoding()) + .setQuote(csvOptions.quote()) + .setSkipLeadingRows(csvOptions.skipLeadingRows()); + } + if (schema 
!= null) { + loadConfigurationPb.setSchema(schema.toPb()); + } + if (formatOptions != null) { + loadConfigurationPb.setSourceFormat(formatOptions.type()); + } + loadConfigurationPb.setMaxBadRecords(maxBadRecords); + loadConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues); + loadConfigurationPb.setProjectionFields(projectionFields); + return super.toPb().setConfiguration(new JobConfiguration().setLoad(loadConfigurationPb)); + } + + /** + * Creates a builder for a BigQuery Load Job given destination table and source URI. + */ + public static Builder builder(TableId destinationTable, String sourceUri) { + return builder(destinationTable, ImmutableList.of(checkNotNull(sourceUri))); + } + + /** + * Creates a builder for a BigQuery Load Job given destination table and source URIs. + */ + public static Builder builder(TableId destinationTable, List sourceUris) { + return new Builder().destinationTable(destinationTable).sourceUris(sourceUris); + } + + /** + * Returns a BigQuery Load Job for the given destination table and source URI. Job's id is chosen + * by the service. + */ + public static LoadJobInfo of(TableId destinationTable, String sourceUri) { + return builder(destinationTable, sourceUri).build(); + } + + /** + * Returns a BigQuery Load Job for the given destination table and source URIs. Job's id is chosen + * by the service. + */ + public static LoadJobInfo of(TableId destinationTable, List sourceUris) { + return builder(destinationTable, sourceUris).build(); + } + + /** + * Returns a BigQuery Load Job for the given destination table and source URI. Job's id is set to + * the provided value. + */ + public static LoadJobInfo of(JobId jobId, TableId destinationTable, String sourceUri) { + return builder(destinationTable, sourceUri).jobId(jobId).build(); + } + + /** + * Returns a BigQuery Load Job for the given destination table and source URIs. Job's id is set to + * the provided value. 
+ */ + public static LoadJobInfo of(JobId jobId, TableId destinationTable, List sourceUris) { + return builder(destinationTable, sourceUris).jobId(jobId).build(); + } + + @SuppressWarnings("unchecked") + static LoadJobInfo fromPb(Job jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java new file mode 100644 index 000000000000..d88820fe5a29 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Option.java @@ -0,0 +1,72 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; +import com.google.gcloud.spi.BigQueryRpc; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Base class for BigQuery operation option. 
+ */ +class Option implements Serializable { + + private static final long serialVersionUID = -6647817677804099207L; + + private final BigQueryRpc.Option rpcOption; + private final Object value; + + Option(BigQueryRpc.Option rpcOption, Object value) { + this.rpcOption = checkNotNull(rpcOption); + this.value = value; + } + + BigQueryRpc.Option rpcOption() { + return rpcOption; + } + + Object value() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (!(obj instanceof Option)) { + return false; + } + Option other = (Option) obj; + return Objects.equals(rpcOption, other.rpcOption) + && Objects.equals(value, other.value); + } + + @Override + public int hashCode() { + return Objects.hash(rpcOption, value); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("name", rpcOption.value()) + .add("value", value) + .toString(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java new file mode 100644 index 000000000000..dd09d7010a50 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java @@ -0,0 +1,520 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobConfiguration; +import com.google.api.services.bigquery.model.JobConfigurationQuery; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.gcloud.bigquery.JobStatistics.QueryStatistics; + +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Google BigQuery Query Job. A Query Job runs a query against BigQuery data. + */ +public class QueryJobInfo extends JobInfo { + + private static final long serialVersionUID = -8708709356039780158L; + + /** + * Priority levels for a query. If not specified the priority is assumed to be + * {@link Priority#INTERACTIVE}. + */ + public enum Priority { + /** + * Query is executed as soon as possible and count towards the + * concurrent rate limit and the daily + * rate limit. + */ + INTERACTIVE, + + /** + * Query is queued and started as soon as idle resources are available, usually within a few + * minutes. If a {@link Priority#BATCH} query hasn't started within 3 hours, its priority is + * changed to {@link Priority#INTERACTIVE}. 
+ */ + BATCH + } + + private final String query; + private final TableId destinationTable; + private final Map tableDefinitions; + private final List userDefinedFunctions; + private final CreateDisposition createDisposition; + private final WriteDisposition writeDisposition; + private final DatasetId defaultDataset; + private final Priority priority; + private final Boolean allowLargeResults; + private final Boolean useQueryCache; + private final Boolean flattenResults; + private final Boolean dryRun; + + public static final class Builder extends JobInfo.Builder { + + private String query; + private TableId destinationTable; + private Map tableDefinitions; + private List userDefinedFunctions; + private CreateDisposition createDisposition; + private WriteDisposition writeDisposition; + private DatasetId defaultDataset; + private Priority priority; + private Boolean allowLargeResults; + private Boolean useQueryCache; + private Boolean flattenResults; + private Boolean dryRun; + + private Builder() {} + + private Builder(QueryJobInfo jobInfo) { + super(jobInfo); + this.query = jobInfo.query; + this.destinationTable = jobInfo.destinationTable; + this.tableDefinitions = jobInfo.tableDefinitions; + this.userDefinedFunctions = jobInfo.userDefinedFunctions; + this.createDisposition = jobInfo.createDisposition; + this.writeDisposition = jobInfo.writeDisposition; + this.defaultDataset = jobInfo.defaultDataset; + this.priority = jobInfo.priority; + this.allowLargeResults = jobInfo.allowLargeResults; + this.useQueryCache = jobInfo.useQueryCache; + this.flattenResults = jobInfo.flattenResults; + this.dryRun = jobInfo.dryRun; + } + + private Builder(Job jobPb) { + super(jobPb); + JobConfigurationQuery queryConfigurationPb = jobPb.getConfiguration().getQuery(); + this.query = queryConfigurationPb.getQuery(); + allowLargeResults = queryConfigurationPb.getAllowLargeResults(); + useQueryCache = queryConfigurationPb.getUseQueryCache(); + flattenResults = 
queryConfigurationPb.getFlattenResults(); + dryRun = jobPb.getConfiguration().getDryRun(); + if (queryConfigurationPb.getDestinationTable() != null) { + destinationTable = TableId.fromPb(queryConfigurationPb.getDestinationTable()); + } + if (queryConfigurationPb.getDefaultDataset() != null) { + defaultDataset = DatasetId.fromPb(queryConfigurationPb.getDefaultDataset()); + } + if (queryConfigurationPb.getPriority() != null) { + priority = Priority.valueOf(queryConfigurationPb.getPriority()); + } + if (queryConfigurationPb.getTableDefinitions() != null) { + tableDefinitions = Maps.transformValues(queryConfigurationPb.getTableDefinitions(), + ExternalDataConfiguration.FROM_PB_FUNCTION); + } + if (queryConfigurationPb.getUserDefinedFunctionResources() != null) { + userDefinedFunctions = Lists.transform( + queryConfigurationPb.getUserDefinedFunctionResources(), + UserDefinedFunction.FROM_PB_FUNCTION); + } + if (queryConfigurationPb.getCreateDisposition() != null) { + createDisposition = CreateDisposition.valueOf(queryConfigurationPb.getCreateDisposition()); + } + if (queryConfigurationPb.getWriteDisposition() != null) { + writeDisposition = WriteDisposition.valueOf(queryConfigurationPb.getWriteDisposition()); + } + } + + /** + * Sets the BigQuery SQL query to execute. + */ + public Builder query(String query) { + this.query = query; + return self(); + } + + /** + * Sets the table where to put query results. If not provided a new table is created. This value + * is required if {@link Builder#allowLargeResults(Boolean)} is set to {@code true}. + */ + public Builder destinationTable(TableId destinationTable) { + this.destinationTable = destinationTable; + return self(); + } + + /** + * Sets the external tables definitions. If querying external data sources outside of BigQuery, + * this value describes the data format, location and other properties of the data + * sources. 
By defining these properties, the data sources can be queried as if they were + * standard BigQuery tables. + */ + public Builder tableDefinitions(Map tableDefinitions) { + this.tableDefinitions = tableDefinitions != null ? Maps.newHashMap(tableDefinitions) : null; + return self(); + } + + /** + * Adds a new external table definition. If a definition already exists for {@code tableName} + * it is updated. + * + * @param tableName name of the table + * @param tableDefinition external data configuration for the table used by this query + */ + public Builder addTableDefinition(String tableName, ExternalDataConfiguration tableDefinition) { + if (this.tableDefinitions == null) { + this.tableDefinitions = Maps.newHashMap(); + } + this.tableDefinitions.put(checkNotNull(tableName), checkNotNull(tableDefinition)); + return self(); + } + + /** + * Sets user defined function resources that can be used by this query. Function resources + * can either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from + * a Google Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}. + */ + public Builder userDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = + userDefinedFunctions != null ? ImmutableList.copyOf(userDefinedFunctions) : null; + return self(); + } + + /** + * Sets whether the job is allowed to create tables. + * + * @see + * Jobs: Query Configuration + */ + public Builder createDisposition(CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return self(); + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Jobs: Query Configuration + */ + public Builder writeDisposition(WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return self(); + } + + /** + * Sets the default dataset. This dataset is used for all unqualified table names used in the + * query. 
+ */ + public Builder defaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return self(); + } + + /** + * Sets a priority for the query. If not specified the priority is assumed to be + * {@link Priority#INTERACTIVE}. + */ + public Builder priority(Priority priority) { + this.priority = priority; + return self(); + } + + /** + * Sets whether the job is enabled to create arbitrarily large results. If {@code true} + * the query is allowed to create large results at a slight cost in performance. If {@code true} + * {@link Builder#destinationTable(TableId)} must be provided. + * + * @see + * Returning Large Query Results + */ + public Builder allowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + return self(); + } + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link Builder#destinationTable(TableId)} is not set. + * + * @see Query Caching + */ + public Builder useQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return self(); + } + + /** + * Sets whether nested and repeated fields should be flattened. If set to {@code false} + * {@link Builder#allowLargeResults(Boolean)} must be {@code true}. By default results are + * flattened. + * + * @see Flatten + */ + public Builder flattenResults(Boolean flattenResults) { + this.flattenResults = flattenResults; + return self(); + } + + /** + * Sets whether the job has to be dry run or not. If set, the job is not executed. A valid query + * will return a mostly empty response with some processing statistics, while an invalid query + * will return the same error it would if it wasn't a dry run. 
+ */ + public Builder dryRun(Boolean dryRun) { + this.dryRun = dryRun; + return self(); + } + + @Override + public QueryJobInfo build() { + return new QueryJobInfo(this); + } + } + + private QueryJobInfo(Builder builder) { + super(builder); + this.query = checkNotNull(builder.query); + this.allowLargeResults = builder.allowLargeResults; + this.createDisposition = builder.createDisposition; + this.defaultDataset = builder.defaultDataset; + this.destinationTable = builder.destinationTable; + this.flattenResults = builder.flattenResults; + this.priority = builder.priority; + this.useQueryCache = builder.useQueryCache; + this.userDefinedFunctions = builder.userDefinedFunctions; + this.writeDisposition = builder.writeDisposition; + this.tableDefinitions = + builder.tableDefinitions != null ? ImmutableMap.copyOf(builder.tableDefinitions) : null; + this.dryRun = builder.dryRun; + } + + /** + * Returns whether the job is enabled to create arbitrarily large results. If {@code true} + * the query is allowed to create large results at a slight cost in performance. + * the query is allowed to create large results at a slight cost in performance. + * + * @see + * Returning Large Query Results + */ + public Boolean allowLargeResults() { + return allowLargeResults; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Jobs: Query Configuration + */ + public CreateDisposition createDisposition() { + return createDisposition; + } + + /** + * Returns the default dataset. This dataset is used for all unqualified table names used in the + * query. + */ + public DatasetId defaultDataset() { + return defaultDataset; + } + + /** + * Returns the table where to put query results. If not provided a new table is created. This + * value is required if {@link #allowLargeResults()} is {@code true}. + */ + public TableId destinationTable() { + return destinationTable; + } + + /** + * Returns whether nested and repeated fields should be flattened. 
If set to {@code false} + * {@link Builder#allowLargeResults(Boolean)} must be {@code true}. + * + * @see Flatten + */ + public Boolean flattenResults() { + return flattenResults; + } + + /** + * Returns the query priority. + */ + public Priority priority() { + return priority; + } + + /** + * Returns the Google BigQuery SQL query. + */ + public String query() { + return query; + } + + /** + * Returns the external tables definitions. If querying external data sources outside of BigQuery, + * this value describes the data format, location and other properties of the data + * sources. By defining these properties, the data sources can be queried as if they were + * standard BigQuery tables. + */ + public Map tableDefinitions() { + return tableDefinitions; + } + + /** + * Returns whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link Builder#destinationTable(TableId)} is not set. + * + * @see Query Caching + */ + public Boolean useQueryCache() { + return useQueryCache; + } + + /** + * Returns user defined function resources that can be used by this query. Function resources + * can either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from + * a Google Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}. + */ + public List userDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Jobs: Query Configuration + */ + public WriteDisposition writeDisposition() { + return writeDisposition; + } + + /** + * Returns whether the job has to be dry run or not. If set, the job is not executed. A valid + * query will return a mostly empty response with some processing statistics, while an invalid + * query will return the same error it would if it wasn't a dry run. 
+ */ + public Boolean dryRun() { + return dryRun; + } + + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("query", query) + .add("destinationTable", destinationTable) + .add("defaultDataset", defaultDataset) + .add("allowLargeResults", allowLargeResults) + .add("flattenResults", flattenResults) + .add("priority", priority) + .add("tableDefinitions", tableDefinitions) + .add("userQueryCache", useQueryCache) + .add("userDefinedFunctions", userDefinedFunctions) + .add("createDisposition", createDisposition) + .add("writeDisposition", writeDisposition) + .add("dryRun", dryRun); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryJobInfo && Objects.equals(toPb(), ((QueryJobInfo) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), allowLargeResults, createDisposition, destinationTable, + defaultDataset, flattenResults, priority, query, tableDefinitions, useQueryCache, + userDefinedFunctions, writeDisposition, dryRun); + } + + @Override + Job toPb() { + JobConfiguration configurationPb = new JobConfiguration(); + JobConfigurationQuery queryConfigurationPb = new JobConfigurationQuery(); + queryConfigurationPb.setQuery(query); + configurationPb.setDryRun(dryRun()); + if (allowLargeResults != null) { + queryConfigurationPb.setAllowLargeResults(allowLargeResults); + } + if (createDisposition != null) { + queryConfigurationPb.setCreateDisposition(createDisposition.toString()); + } + if (destinationTable != null) { + queryConfigurationPb.setDestinationTable(destinationTable.toPb()); + } + if (defaultDataset != null) { + queryConfigurationPb.setDefaultDataset(defaultDataset.toPb()); + } + if (flattenResults != null) { + queryConfigurationPb.setFlattenResults(flattenResults); + } + if (priority != null) { + queryConfigurationPb.setPriority(priority.toString()); + } + if (tableDefinitions != 
null) { + queryConfigurationPb.setTableDefinitions( + Maps.transformValues(tableDefinitions, ExternalDataConfiguration.TO_PB_FUNCTION)); + } + if (useQueryCache != null) { + queryConfigurationPb.setUseQueryCache(useQueryCache); + } + if (userDefinedFunctions != null) { + queryConfigurationPb.setUserDefinedFunctionResources( + Lists.transform(userDefinedFunctions, UserDefinedFunction.TO_PB_FUNCTION)); + } + if (writeDisposition != null) { + queryConfigurationPb.setWriteDisposition(writeDisposition.toString()); + } + return super.toPb().setConfiguration(configurationPb.setQuery(queryConfigurationPb)); + } + + /** + * Creates a builder for a BigQuery Query Job given the query to be run. + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Returns a BigQuery Copy Job for the given the query to be run. Job's id is chosen by the + * service. + */ + public static QueryJobInfo of(String query) { + return builder(query).build(); + } + + /** + * Returns a BigQuery Copy Job for the given the query to be run. Job's id is set to the provided + * value. + */ + public static QueryJobInfo of(JobId jobId, String query) { + return builder(query).jobId(jobId).build(); + } + + @SuppressWarnings("unchecked") + static QueryJobInfo fromPb(Job jobPb) { + return new Builder(jobPb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java new file mode 100644 index 000000000000..0c0cf3de761d --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryRequest.java @@ -0,0 +1,303 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google Cloud BigQuery Query Request. This class can be used to run a BigQuery SQL query and + * return results if the query completes within a specified timeout. The query results are saved to + * a temporary table that is deleted approximately 24 hours after the query is run. The query is run + * through a BigQuery Job whose identity can be accessed via {@link QueryResponse#jobId()}. If the + * query does not complete within the provided {@link Builder#maxWaitTime(Long)}, the response + * returned by {@link BigQuery#query(QueryRequest)} will have {@link QueryResponse#jobComplete()} + * set to {@code false} and {@link QueryResponse#result()} set to {@code null}. To obtain query + * results you can use {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} until + * {@link QueryResponse#jobComplete()} returns {@code true}. + * + *

Example usage of a query request: + *

    {@code
+ *    // Substitute "field", "table" and "dataset" with real field, table and dataset identifiers
+ *    QueryRequest request = QueryRequest.builder("SELECT field FROM table")
+ *      .defaultDataset(DatasetId.of("dataset"))
+ *      .maxWaitTime(60000L)
+ *      .maxResults(1000L)
+ *      .build();
+ *    QueryResponse response = bigquery.query(request);
+ *    while (!response.jobComplete()) {
+ *      Thread.sleep(1000);
+ *      response = bigquery.getQueryResults(response.jobId());
+ *    }
+ *    List executionErrors = response.executionErrors();
+ *    // look for errors in executionErrors
+ *    QueryResult result = response.result();
+ *    Iterator> rowIterator = result.iterateAll();
+ *    while(rowIterator.hasNext()) {
+ *      List row = rowIterator.next();
+ *      // do something with row
+ *    }
+ * }
+ * + * @see Query + * @see Query Reference + */ +public class QueryRequest implements Serializable { + + private static final long serialVersionUID = -8727328332415880852L; + + private final String query; + private final Long maxResults; + private final DatasetId defaultDataset; + private final Long maxWaitTime; + private final Boolean dryRun; + private final Boolean useQueryCache; + + public static final class Builder { + + private String query; + private Long maxResults; + private DatasetId defaultDataset; + private Long maxWaitTime; + private Boolean dryRun; + private Boolean useQueryCache; + + private Builder() {} + + /** + * Sets the BigQuery query to be executed. + */ + public Builder query(String query) { + this.query = checkNotNull(query); + return this; + } + + /** + * Sets the maximum number of rows of data to return per page of results. Setting this flag to a + * small value such as 1000 and then paging through results might improve reliability when the + * query result set is large. In addition to this limit, responses are also limited to 10 MB. + * By default, there is no maximum row count, and only the byte limit applies. + */ + public Builder maxResults(Long maxResults) { + this.maxResults = maxResults; + return this; + } + + /** + * Sets the default dataset to assume for any unqualified table names in the query. + */ + public Builder defaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + /** + * Sets how long to wait for the query to complete, in milliseconds, before the request times + * out and returns. Note that this is only a timeout for the request, not the query. If the + * query takes longer to run than the timeout value, the call returns without any results and + * with the {@link QueryResponse#jobComplete()} set to {@code false}. If not set, a wait time of + * 10000 milliseconds (10 seconds) is used. 
+ */ + public Builder maxWaitTime(Long maxWaitTime) { + this.maxWaitTime = maxWaitTime; + return this; + } + + /** + * Sets whether the query has to be dry run or not. If set, the query is not executed. If the + * query is valid statistics are returned on how many bytes would be processed. If the query is + * invalid an error is returned. If not set the query is executed. + */ + public Builder dryRun(Boolean dryRun) { + this.dryRun = dryRun; + return this; + } + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. If not specified the + * query cache is used. + * + * @see Query Caching + */ + public Builder useQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + public QueryRequest build() { + return new QueryRequest(this); + } + } + + private QueryRequest(Builder builder) { + query = builder.query; + maxResults = builder.maxResults; + defaultDataset = builder.defaultDataset; + maxWaitTime = builder.maxWaitTime; + dryRun = builder.dryRun; + useQueryCache = builder.useQueryCache; + } + + /** + * Sets the BigQuery query to be executed. + */ + public String query() { + return query; + } + + /** + * Returns the maximum number of rows of data to return per page of results. + */ + public Long maxResults() { + return maxResults; + } + + /** + * Returns the default dataset to assume for any unqualified table names in the query. + */ + public DatasetId defaultDataset() { + return defaultDataset; + } + + /** + * Returns how long to wait for the query to complete, in milliseconds, before the request times + * out and returns. Note that this is only a timeout for the request, not the query. If the + * query takes longer to run than the timeout value, the call returns without any results and + * with the {@link QueryResponse#jobComplete()} set to {@code false}. 
You can call + * {@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)} to wait for the query + * to complete and read the results. If not set, a wait time of 10000 milliseconds (10 seconds) + * is used. + */ + public Long maxWaitTime() { + return maxWaitTime; + } + + /** + * Returns whether the query has to be dry run or not. If set, the query is not executed. If the + * query is valid statistics are returned on how many bytes would be processed. If the query is + * invalid an error is returned. If not set the query is executed. + */ + public Boolean dryRun() { + return dryRun; + } + + /** + * Returns whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. If not specified the + * query cache is used. + * + * @see Query Caching + */ + public Boolean useQueryCache() { + return useQueryCache; + } + + /** + * Returns a builder for the {@code QueryRequest} object. + */ + public Builder toBuilder() { + return new Builder() + .query(query) + .maxResults(maxResults) + .defaultDataset(defaultDataset) + .maxWaitTime(maxWaitTime) + .dryRun(dryRun) + .useQueryCache(useQueryCache); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("query", query) + .add("maxResults", maxResults) + .add("defaultDataset", defaultDataset) + .add("maxWaitTime", maxWaitTime) + .add("dryRun", dryRun) + .add("useQueryCache", useQueryCache) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(query, maxResults, defaultDataset, maxWaitTime, dryRun, useQueryCache); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof QueryRequest && Objects.equals(toPb(), ((QueryRequest) obj).toPb()); + } + + com.google.api.services.bigquery.model.QueryRequest toPb() { + com.google.api.services.bigquery.model.QueryRequest queryRequestPb = + new 
com.google.api.services.bigquery.model.QueryRequest().setQuery(query); + if (maxResults != null) { + queryRequestPb.setMaxResults(maxResults); + } + if (defaultDataset != null) { + queryRequestPb.setDefaultDataset(defaultDataset.toPb()); + } + if (maxWaitTime != null) { + queryRequestPb.setTimeoutMs(maxWaitTime); + } + if (dryRun != null) { + queryRequestPb.setDryRun(dryRun); + } + if (useQueryCache != null) { + queryRequestPb.setUseQueryCache(useQueryCache); + } + return queryRequestPb; + } + + /** + * Creates a builder for a {@code QueryRequest} given the BigQuery SQL query to be executed. + */ + public static Builder builder(String query) { + return new Builder().query(query); + } + + /** + * Creates a {@code QueryRequest} object given the BigQuery SQL query to be executed. + */ + public static QueryRequest of(String query) { + return new Builder().query(query).build(); + } + + static QueryRequest fromPb(com.google.api.services.bigquery.model.QueryRequest queryRequestPb) { + Builder builder = builder(queryRequestPb.getQuery()); + if (queryRequestPb.getMaxResults() != null) { + builder.maxResults(queryRequestPb.getMaxResults()); + } + if (queryRequestPb.getDefaultDataset() != null) { + builder.defaultDataset(DatasetId.fromPb(queryRequestPb.getDefaultDataset())); + } + if (queryRequestPb.getTimeoutMs() != null) { + builder.maxWaitTime(queryRequestPb.getTimeoutMs()); + } + if (queryRequestPb.getDryRun() != null) { + builder.dryRun(queryRequestPb.getDryRun()); + } + if (queryRequestPb.getUseQueryCache() != null) { + builder.useQueryCache(queryRequestPb.getUseQueryCache()); + } + return builder.build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java new file mode 100644 index 000000000000..8ef8351d9e1a --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResponse.java @@ -0,0 +1,196 @@ +/* + * 
Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * Google Cloud BigQuery Query Response. This class contains the results of a Query Job + * ({@link BigQuery#getQueryResults(JobId, BigQuery.QueryResultsOption...)}) or of a + * Query Request ({@link BigQuery#query(QueryRequest)}). + * + *

Example usage of a query response: + *

    {@code
+ *    QueryResponse response = bigquery.query(request);
+ *    while (!response.jobComplete()) {
+ *      Thread.sleep(1000);
+ *      response = bigquery.getQueryResults(response.jobId());
+ *    }
+ *    List<BigQueryError> executionErrors = response.executionErrors();
+ *    // look for errors in executionErrors
+ *    QueryResult result = response.result();
+ *    Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+ *    while (rowIterator.hasNext()) {
+ *      List<FieldValue> row = rowIterator.next();
+ *      // do something with row
+ *    }
+ * }
+ * + * @see Get Query + * Results + * @see Query + */ +public class QueryResponse implements Serializable { + + private static final long serialVersionUID = 3549226764825005655L; + + private final QueryResult result; + private final String etag; + private final JobId jobId; + private final boolean jobComplete; + private final List executionErrors; + + static final class Builder { + + private QueryResult result; + private String etag; + private JobId jobId; + private boolean jobComplete; + private List executionErrors; + + private Builder() {} + + Builder result(QueryResult result) { + this.result = result; + return this; + } + + Builder etag(String etag) { + this.etag = etag; + return this; + } + + Builder jobId(JobId jobId) { + this.jobId = jobId; + return this; + } + + Builder jobComplete(boolean jobComplete) { + this.jobComplete = jobComplete; + return this; + } + + Builder executionErrors(List executionErrors) { + this.executionErrors = executionErrors; + return this; + } + + QueryResponse build() { + return new QueryResponse(this); + } + } + + private QueryResponse(Builder builder) { + this.result = builder.result; + this.etag = builder.etag; + this.jobId = builder.jobId; + this.jobComplete = builder.jobComplete; + this.executionErrors = builder.executionErrors != null ? builder.executionErrors + : ImmutableList.of(); + } + + /** + * Returns the result of the query. Returns {@code null} if {@link #jobComplete()} is {@code + * false}. + */ + public QueryResult result() { + return result; + } + + /** + * Returns the hash of the {@code QueryResponse} resource or {@code null} if not set. + */ + public String etag() { + return etag; + } + + /** + * Returns the identity of the BigQuery Job that was created to run the query. This field will be + * present even if the original request timed out. + */ + public JobId jobId() { + return jobId; + } + + /** + * Returns whether the job running the query has completed or not. 
If {@link #result()} is not + * {@code null}, this method will always return {@code true}. If this method returns {@code false} + * {@link #result()} returns {@code null}. This method can be used to check if query execution + * completed and results are available. + */ + public boolean jobComplete() { + return jobComplete; + } + + /** + * Returns whether errors and warnings occurred during the execution of the job. If this method + * returns {@code true} it does not necessarily mean that the job has completed or was + * unsuccessful. + */ + public boolean hasErrors() { + return !executionErrors.isEmpty(); + } + + /** + * Returns errors and warnings encountered during the running of the job, if any. Errors here do + * not necessarily mean that the job has completed or was unsuccessful. + */ + public List executionErrors() { + return executionErrors; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("result", result) + .add("etag", etag) + .add("jobId", jobId) + .add("jobComplete", jobComplete) + .add("executionErrors", executionErrors) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(jobId); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryResponse response = (QueryResponse) obj; + return jobComplete == response.jobComplete + && Objects.equals(etag, response.etag) + && Objects.equals(result, response.result) + && Objects.equals(jobId, response.jobId) + && Objects.equals(executionErrors, response.executionErrors); + } + + static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java new file mode 100644 index 000000000000..692abab937a9 --- /dev/null +++ 
b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryResult.java @@ -0,0 +1,176 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.gcloud.PageImpl; + +import java.util.List; +import java.util.Objects; + +public class QueryResult extends PageImpl> { + + private static final long serialVersionUID = -4831062717210349818L; + + private final boolean cacheHit; + private final Schema schema; + private final long totalRows; + private final long totalBytesProcessed; + + interface QueryResultsPageFetcher extends PageImpl.NextPageFetcher> { + @Override + QueryResult nextPage(); + } + + static final class Builder { + + private QueryResultsPageFetcher pageFetcher; + private String cursor; + private Iterable> results; + private boolean cacheHit; + private Schema schema; + private long totalRows; + private long totalBytesProcessed; + + private Builder() {} + + Builder cacheHit(boolean cacheHit) { + this.cacheHit = cacheHit; + return this; + } + + Builder schema(Schema schema) { + this.schema = schema; + return this; + } + + Builder totalBytesProcessed(long totalBytesProcessed) { + this.totalBytesProcessed = totalBytesProcessed; + return this; + } + + Builder totalRows(long totalRows) { + this.totalRows = totalRows; + return this; + } + + Builder 
pageFetcher(QueryResultsPageFetcher pageFetcher) { + this.pageFetcher = pageFetcher; + return this; + } + + Builder cursor(String cursor) { + this.cursor = cursor; + return this; + } + + Builder results(Iterable> results) { + this.results = results; + return this; + } + + QueryResult build() { + return new QueryResult(this); + } + } + + private QueryResult(Builder builder) { + super(builder.pageFetcher, builder.cursor, builder.results != null ? builder.results + : ImmutableList.>of()); + this.cacheHit = builder.cacheHit; + this.schema = builder.schema; + this.totalBytesProcessed = builder.totalBytesProcessed; + this.totalRows = builder.totalRows; + } + + /** + * Returns whether the query result was fetched from the query cache. + * + * @see Query Caching + */ + public boolean cacheHit() { + return cacheHit; + } + + /** + * Returns the schema of the results. This is present only when the query completes successfully. + */ + public Schema schema() { + return schema; + } + + /** + * Returns the total number of bytes processed for the query. If this query was a dry run, this is + * the number of bytes that would be processed if the query were run. + */ + public long totalBytesProcessed() { + return totalBytesProcessed; + } + + /** + * Returns the total number of rows in the complete query result set, which can be more than the + * number of rows in the first page of results returned by {@link #values()}. Returns {@code 0} + * if the query was a dry run. 
+ */ + public long totalRows() { + return totalRows; + } + + @Override + public QueryResult nextPage() { + return (QueryResult) super.nextPage(); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("rows", values()) + .add("cacheHit", cacheHit) + .add("schema", schema) + .add("totalBytesProcessed", totalBytesProcessed) + .add("totalRows", totalRows) + .add("cursor", nextPageCursor()) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), cacheHit, schema, totalBytesProcessed, totalRows); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + QueryResult response = (QueryResult) obj; + return Objects.equals(nextPageCursor(), response.nextPageCursor()) + && Objects.equals(values(), response.values()) + && Objects.equals(schema, response.schema) + && totalRows == response.totalRows + && totalBytesProcessed == response.totalBytesProcessed + && cacheHit == response.cacheHit; + } + + static Builder builder() { + return new Builder(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java new file mode 100644 index 000000000000..787bb0d7f35f --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Schema.java @@ -0,0 +1,159 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.io.Serializable; +import java.util.List; +import java.util.Objects; + +/** + * This class represents the schema for a Google BigQuery Table or data source. + */ +public class Schema implements Serializable { + + static final Function + FROM_PB_FUNCTION = new Function() { + @Override + public Schema apply(com.google.api.services.bigquery.model.TableSchema pb) { + return Schema.fromPb(pb); + } + }; + static final Function + TO_PB_FUNCTION = new Function() { + @Override + public com.google.api.services.bigquery.model.TableSchema apply(Schema schema) { + return schema.toPb(); + } + }; + + private static final long serialVersionUID = 2007400596384553696L; + + private final List fields; + + public static final class Builder { + + private List fields; + + private Builder() {} + + /** + * Adds a field's schema to the table's schema. + */ + public Builder addField(Field field) { + if (fields == null) { + fields = Lists.newArrayList(); + } + fields.add(checkNotNull(field)); + return this; + } + + /** + * Sets table's schema fields. + */ + public Builder fields(Iterable fields) { + this.fields = Lists.newArrayList(checkNotNull(fields)); + return this; + } + + /** + * Sets table's schema fields. + */ + public Builder fields(Field... fields) { + this.fields = Lists.newArrayList(fields); + return this; + } + + /** + * Creates an {@code Schema} object. + */ + public Schema build() { + return new Schema(this); + } + } + + private Schema(Builder builder) { + this.fields = builder.fields != null ? 
ImmutableList.copyOf(builder.fields) + : ImmutableList.of(); + } + + /** + * Returns the fields in the current table schema. + */ + public List fields() { + return fields; + } + + /** + * Returns a builder for the {@code Schema} object. + */ + public Builder toBuilder() { + return builder().fields(fields); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("fields", fields) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(fields); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof Schema && Objects.equals(toPb(), ((Schema) obj).toPb()); + } + + com.google.api.services.bigquery.model.TableSchema toPb() { + com.google.api.services.bigquery.model.TableSchema tableSchemaPb = + new com.google.api.services.bigquery.model.TableSchema(); + if (fields != null) { + List fieldsPb = Lists.transform(fields, Field.TO_PB_FUNCTION); + tableSchemaPb.setFields(fieldsPb); + } + return tableSchemaPb; + } + + public static Builder builder() { + return new Builder(); + } + + public static Schema of(Iterable fields) { + return builder().fields(fields).build(); + } + + public static Schema of(Field... fields) { + return builder().fields(fields).build(); + } + + static Schema fromPb(com.google.api.services.bigquery.model.TableSchema tableSchemaPb) { + return Schema.of(Lists.transform(tableSchemaPb.getFields(), Field.FROM_PB_FUNCTION)); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java new file mode 100644 index 000000000000..7a4e0bbb38b4 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableId.java @@ -0,0 +1,118 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.TableReference; +import com.google.common.base.Function; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery Table identity. + */ +public class TableId implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public TableId apply(TableReference pb) { + return TableId.fromPb(pb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public TableReference apply(TableId tableId) { + return tableId.toPb(); + } + }; + private static final long serialVersionUID = -6186254820908152300L; + + private final String project; + private final String dataset; + private final String table; + + /** + * Returns project's user-defined id. + */ + public String project() { + return project; + } + + /** + * Returns dataset's user-defined id. + */ + public String dataset() { + return dataset; + } + + /** + * Returns table's user-defined id. + */ + public String table() { + return table; + } + + private TableId(String project, String dataset, String table) { + this.project = project; + this.dataset = dataset; + this.table = table; + } + + /** + * Creates a table identity given project's, dataset's and table's user-defined ids. 
+ */ + public static TableId of(String project, String dataset, String table) { + return new TableId(checkNotNull(project), checkNotNull(dataset), checkNotNull(table)); + } + + /** + * Creates a table identity given dataset's and table's user-defined ids. + */ + public static TableId of(String dataset, String table) { + return new TableId(null, checkNotNull(dataset), checkNotNull(table)); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof TableId && Objects.equals(toPb(), ((TableId) obj).toPb()); + } + + @Override + public int hashCode() { + return Objects.hash(project, dataset, table); + } + + @Override + public String toString() { + return toPb().toString(); + } + + TableReference toPb() { + return new TableReference().setProjectId(project).setDatasetId(dataset).setTableId(table); + } + + static TableId fromPb(TableReference tableRef) { + return new TableId( + tableRef.getProjectId(), + tableRef.getDatasetId(), + tableRef.getTableId()); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java new file mode 100644 index 000000000000..05fb6908a51b --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java @@ -0,0 +1,230 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.Streamingbuffer; +import com.google.api.services.bigquery.model.Table; +import com.google.common.base.MoreObjects; +import com.google.common.base.MoreObjects.ToStringHelper; + +import java.io.Serializable; +import java.math.BigInteger; +import java.util.Objects; + +/** + * A Google BigQuery Table information. A BigQuery table is a standard, two-dimensional table with + * individual records organized in rows, and a data type assigned to each column (also called a + * field). Individual fields within a record may contain nested and repeated children fields. Every + * table is described by a schema that describes field names, types, and other information. + * + * @see Managing Tables + */ +public class TableInfo extends BaseTableInfo { + + private static final long serialVersionUID = -5910575573063546949L; + + private final String location; + private final StreamingBuffer streamingBuffer; + + /** + * Google BigQuery Table's Streaming Buffer information. This class contains information on a + * table's streaming buffer as the estimated size in number of rows/bytes. + */ + public static class StreamingBuffer implements Serializable { + + private static final long serialVersionUID = -6713971364725267597L; + private final long estimatedRows; + private final long estimatedBytes; + private final long oldestEntryTime; + + StreamingBuffer(long estimatedRows, long estimatedBytes, long oldestEntryTime) { + this.estimatedRows = estimatedRows; + this.estimatedBytes = estimatedBytes; + this.oldestEntryTime = oldestEntryTime; + } + + /** + * Returns a lower-bound estimate of the number of rows currently in the streaming buffer. + */ + public long estimatedRows() { + return estimatedRows; + } + + /** + * Returns a lower-bound estimate of the number of bytes currently in the streaming buffer. 
+ */ + public long estimatedBytes() { + return estimatedBytes; + } + + /** + * Returns the timestamp of the oldest entry in the streaming buffer, in milliseconds since + * epoch. + */ + public long oldestEntryTime() { + return oldestEntryTime; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("estimatedRows", estimatedRows) + .add("estimatedBytes", estimatedBytes) + .add("oldestEntryTime", oldestEntryTime) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(estimatedRows, estimatedBytes, oldestEntryTime); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof StreamingBuffer + && Objects.equals(toPb(), ((StreamingBuffer) obj).toPb()); + } + + Streamingbuffer toPb() { + return new Streamingbuffer() + .setEstimatedBytes(BigInteger.valueOf(estimatedBytes)) + .setEstimatedRows(BigInteger.valueOf(estimatedRows)) + .setOldestEntryTime(BigInteger.valueOf(oldestEntryTime)); + } + + static StreamingBuffer fromPb(Streamingbuffer streamingBufferPb) { + return new StreamingBuffer(streamingBufferPb.getEstimatedRows().longValue(), + streamingBufferPb.getEstimatedBytes().longValue(), + streamingBufferPb.getOldestEntryTime().longValue()); + } + } + + public static final class Builder extends BaseTableInfo.Builder { + + private String location; + private StreamingBuffer streamingBuffer; + + private Builder() {} + + private Builder(TableInfo tableInfo) { + super(tableInfo); + this.location = tableInfo.location; + this.streamingBuffer = tableInfo.streamingBuffer; + } + + protected Builder(Table tablePb) { + super(tablePb); + this.location = tablePb.getLocation(); + if (tablePb.getStreamingBuffer() != null) { + this.streamingBuffer = StreamingBuffer.fromPb(tablePb.getStreamingBuffer()); + } + } + + Builder location(String location) { + this.location = location; + return self(); + } + + Builder streamingBuffer(StreamingBuffer streamingBuffer) { + this.streamingBuffer = streamingBuffer; + 
return self(); + } + + /** + * Creates a {@code TableInfo} object. + */ + @Override + public TableInfo build() { + return new TableInfo(this); + } + } + + private TableInfo(Builder builder) { + super(builder); + this.location = builder.location; + this.streamingBuffer = builder.streamingBuffer; + } + + /** + * Returns the geographic location where the table should reside. This value is inherited from the + * dataset. + * + * @see + * Dataset Location + */ + public String location() { + return location; + } + + /** + * Returns information on the table's streaming buffer if any exists. Returns {@code null} if no + * streaming buffer exists. + */ + public StreamingBuffer streamingBuffer() { + return streamingBuffer; + } + + /** + * Returns a builder for a BigQuery Table. + * + * @param tableId table id + * @param schema the schema of the table + */ + public static Builder builder(TableId tableId, Schema schema) { + return new Builder().tableId(tableId).type(Type.TABLE).schema(schema); + } + + /** + * Creates BigQuery table given its type. + * + * @param tableId table id + * @param schema the schema of the table + */ + public static TableInfo of(TableId tableId, Schema schema) { + return builder(tableId, schema).build(); + } + + /** + * Returns a builder for the {@code TableInfo} object. 
+ */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("location", location) + .add("streamingBuffer", streamingBuffer); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + tablePb.setLocation(location); + if (streamingBuffer != null) { + tablePb.setStreamingBuffer(streamingBuffer.toPb()); + } + return tablePb; + } + + @SuppressWarnings("unchecked") + static TableInfo fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java new file mode 100644 index 000000000000..2135e0ddc941 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/UserDefinedFunction.java @@ -0,0 +1,151 @@ +package com.google.gcloud.bigquery; + +import com.google.api.services.bigquery.model.UserDefinedFunctionResource; +import com.google.common.base.Function; +import com.google.common.base.MoreObjects; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Google BigQuery User Defined Function. BigQuery supports user-defined functions (UDFs) written in + * JavaScript. A UDF is similar to the "Map" function in a MapReduce: it takes a single row as input + * and produces zero or more rows as output. The output can potentially have a different schema than + * the input. 
+ * + * @see User-Defined Functions + * + */ +public abstract class UserDefinedFunction implements Serializable { + + static final Function FROM_PB_FUNCTION = + new Function() { + @Override + public UserDefinedFunction apply(UserDefinedFunctionResource userDefinedFunctionPb) { + return UserDefinedFunction.fromPb(userDefinedFunctionPb); + } + }; + static final Function TO_PB_FUNCTION = + new Function() { + @Override + public UserDefinedFunctionResource apply(UserDefinedFunction userDefinedFunction) { + return userDefinedFunction.toPb(); + } + }; + + private static final long serialVersionUID = 8704260561787440287L; + + /** + * Type of user-defined function. User defined functions can be provided inline as code blobs + * ({@link #INLINE}) or as a Google Cloud Storage URI ({@link #FROM_URI}). + */ + public enum Type { + INLINE, + FROM_URI + } + + private final Type type; + private final String content; + + UserDefinedFunction(Type type, String content) { + this.type = type; + this.content = content; + } + + public Type type() { + return type; + } + + /** + * If {@link #type()} is {@link Type#INLINE} this method returns a code blob. If {@link #type()} + * is {@link Type#FROM_URI} the method returns a Google Cloud Storage URI (e.g. gs://bucket/path). + */ + public String content() { + return content; + } + + /** + * A Google Cloud BigQuery user-defined function, as a code blob. 
+ */ + static final class InlineFunction extends UserDefinedFunction { + + private static final long serialVersionUID = 1083672109192091686L; + + InlineFunction(String inlineCode) { + super(Type.INLINE, inlineCode); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("inlineCode", content()).toString(); + } + + @Override + public com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb() { + return new com.google.api.services.bigquery.model.UserDefinedFunctionResource() + .setInlineCode(content()); + } + } + + /** + * A Google Cloud BigQuery user-defined function, as an URI to Google Cloud Storage. + */ + static final class UriFunction extends UserDefinedFunction { + + private static final long serialVersionUID = 4660331691852223839L; + + UriFunction(String functionUri) { + super(Type.FROM_URI, functionUri); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this).add("functionUri", content()).toString(); + } + + @Override + public com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb() { + return new com.google.api.services.bigquery.model.UserDefinedFunctionResource() + .setResourceUri(content()); + } + } + + @Override + public int hashCode() { + return Objects.hash(type, content); + } + + @Override + public boolean equals(Object obj) { + return obj instanceof UserDefinedFunction + && Objects.equals(toPb(), ((UserDefinedFunction) obj).toPb()); + } + + public abstract com.google.api.services.bigquery.model.UserDefinedFunctionResource toPb(); + + /** + * Creates a Google Cloud BigQuery user-defined function given a code blob. + */ + public static UserDefinedFunction inline(String functionDefinition) { + return new InlineFunction(functionDefinition); + } + + /** + * Creates a Google Cloud BigQuery user-defined function given a Google Cloud Storage URI (e.g. + * gs://bucket/path). 
+ */ + public static UserDefinedFunction fromUri(String functionDefinition) { + return new UriFunction(functionDefinition); + } + + static UserDefinedFunction fromPb( + com.google.api.services.bigquery.model.UserDefinedFunctionResource pb) { + if (pb.getInlineCode() != null) { + return new InlineFunction(pb.getInlineCode()); + } + if (pb.getResourceUri() != null) { + return new UriFunction(pb.getResourceUri()); + } + throw new IllegalArgumentException("Invalid user-defined function"); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java new file mode 100644 index 000000000000..771a7a679c11 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/ViewInfo.java @@ -0,0 +1,234 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.ViewDefinition; +import com.google.common.base.MoreObjects.ToStringHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; + +import java.util.List; + +/** + * Google BigQuery View Table information. 
BigQuery's views are logical views, not materialized + * views, which means that the query that defines the view is re-executed every time the view is + * queried. + * + * @see Views + */ +public class ViewInfo extends BaseTableInfo { + + private static final long serialVersionUID = 7567772157817454901L; + + private final String query; + private final List userDefinedFunctions; + + public static final class Builder extends BaseTableInfo.Builder { + + private String query; + private List userDefinedFunctions; + + private Builder() {} + + private Builder(ViewInfo viewInfo) { + super(viewInfo); + this.query = viewInfo.query; + this.userDefinedFunctions = viewInfo.userDefinedFunctions; + } + + protected Builder(Table tablePb) { + super(tablePb); + ViewDefinition viewPb = tablePb.getView(); + if (viewPb != null) { + this.query = viewPb.getQuery(); + if (viewPb.getUserDefinedFunctionResources() != null) { + this.userDefinedFunctions = Lists.transform(viewPb.getUserDefinedFunctionResources(), + UserDefinedFunction.FROM_PB_FUNCTION); + } + } + } + + /** + * Sets the query used to create the view. + */ + public Builder query(String query) { + this.query = checkNotNull(query); + return self(); + } + + /** + * Sets user defined functions that can be used by {@link #query()}. + * + * @see User-Defined + * Functions + */ + public Builder userDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = ImmutableList.copyOf(checkNotNull(userDefinedFunctions)); + return self(); + } + + /** + * Sets user defined functions that can be used by {@link #query()}. + * + * @see User-Defined + * Functions + */ + public Builder userDefinedFunctions(UserDefinedFunction... userDefinedFunctions) { + this.userDefinedFunctions = ImmutableList.copyOf(userDefinedFunctions); + return self(); + } + + /** + * Creates a {@code ViewInfo} object. 
+ */ + @Override + public ViewInfo build() { + return new ViewInfo(this); + } + } + + private ViewInfo(Builder builder) { + super(builder); + this.query = builder.query; + this.userDefinedFunctions = builder.userDefinedFunctions; + } + + /** + * Returns the query used to create the view. + */ + public String query() { + return query; + } + + /** + * Returns user defined functions that can be used by {@link #query()}. Returns {@code null} if + * not set. + * + * @see User-Defined Functions + * + */ + public List userDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns a builder for the {@code ViewInfo} object. + */ + @Override + public Builder toBuilder() { + return new Builder(this); + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("query", query) + .add("userDefinedFunctions", userDefinedFunctions); + } + + @Override + Table toPb() { + Table tablePb = super.toPb(); + ViewDefinition viewDefinition = new ViewDefinition().setQuery(query); + if (userDefinedFunctions != null) { + viewDefinition.setUserDefinedFunctionResources(Lists.transform(userDefinedFunctions, + UserDefinedFunction.TO_PB_FUNCTION)); + } + tablePb.setView(viewDefinition); + return tablePb; + } + + /** + * Returns a builder for a BigQuery View Table. + * + * @param tableId table id + * @param query the query used to generate the table + */ + public static Builder builder(TableId tableId, String query) { + return new Builder().tableId(tableId).type(Type.VIEW).query(query); + } + + /** + * Returns a builder for a BigQuery View Table. 
+ * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static Builder builder(TableId table, String query, List functions) { + return new Builder() + .tableId(table) + .type(Type.VIEW) + .userDefinedFunctions(functions) + .query(query); + } + + /** + * Returns a builder for a BigQuery View Table. + * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static Builder builder(TableId table, String query, UserDefinedFunction... functions) { + return new Builder() + .tableId(table) + .type(Type.VIEW) + .userDefinedFunctions(functions) + .query(query); + } + + /** + * Creates a BigQuery View given table identity and query. + * + * @param tableId table id + * @param query the query used to generate the table + */ + public static ViewInfo of(TableId tableId, String query) { + return builder(tableId, query).build(); + } + + /** + * Creates a BigQuery View given table identity, a query and some user-defined functions. + * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static ViewInfo of(TableId table, String query, List functions) { + return builder(table, query, functions).build(); + } + + /** + * Creates a BigQuery View given table identity, a query and some user-defined functions. + * + * @param table table id + * @param query the query used to generate the table + * @param functions user-defined functions that can be used by the query + */ + public static ViewInfo of(TableId table, String query, UserDefinedFunction... 
functions) { + return builder(table, query, functions).build(); + } + + @SuppressWarnings("unchecked") + static ViewInfo fromPb(Table tablePb) { + return new Builder(tablePb).build(); + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java new file mode 100644 index 000000000000..4acaa40ca851 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/package-info.java @@ -0,0 +1,27 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A client to Google Cloud BigQuery. + * + *

A simple usage example: + *

{@code
+ * //TODO(mziccard): add code example
+ * }
+ * + * @see Google Cloud BigQuery + */ +package com.google.gcloud.bigquery; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java new file mode 100644 index 000000000000..22fa62a7b86e --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java @@ -0,0 +1,140 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery.testing; + +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.RetryParams; +import com.google.gcloud.bigquery.BigQuery; +import com.google.gcloud.bigquery.BigQueryException; +import com.google.gcloud.bigquery.BigQueryOptions; + +import java.io.IOException; +import java.io.InputStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Utility to create a remote BigQuery configuration for testing. BigQuery options can be obtained + * via the {@link #options()} method. 
Returned options have custom + * {@link BigQueryOptions#retryParams()}: {@link RetryParams#retryMaxAttempts()} is {@code 10}, + * {@link RetryParams#retryMinAttempts()} is {@code 6}, {@link RetryParams#maxRetryDelayMillis()} is + * {@code 30000}, {@link RetryParams#totalRetryPeriodMillis()} is {@code 120000} and + * {@link RetryParams#initialRetryDelayMillis()} is {@code 250}. + * {@link BigQueryOptions#connectTimeout()} and {@link BigQueryOptions#readTimeout()} are both set + * to {@code 60000}. + */ +public class RemoteBigQueryHelper { + + private static final Logger log = Logger.getLogger(RemoteBigQueryHelper.class.getName()); + private static final String DATASET_NAME_PREFIX = "gcloud_test_dataset_temp_"; + private final BigQueryOptions options; + + private RemoteBigQueryHelper(BigQueryOptions options) { + this.options = options; + } + + /** + * Returns a {@link BigQueryOptions} object to be used for testing. + */ + public BigQueryOptions options() { + return options; + } + + /** + * Deletes a dataset, even if non-empty. + * + * @param bigquery the BigQuery service to be used to issue the delete request + * @param dataset the dataset to be deleted + * @return {@code true} if deletion succeeded, {@code false} if the dataset was not found. + * @throws BigQueryException upon failure + */ + public static boolean forceDelete(BigQuery bigquery, String dataset) { + return bigquery.delete(dataset, BigQuery.DatasetDeleteOption.deleteContents()); + } + + /** + * Returns a dataset name generated using a random UUID. + */ + public static String generateDatasetName() { + return DATASET_NAME_PREFIX + UUID.randomUUID().toString().replace('-', '_'); + } + + /** + * Creates a {@code RemoteBigQueryHelper} object for the given project id and JSON key input + * stream. + * + * @param projectId id of the project to be used for running the tests + * @param keyStream input stream for a JSON key + * @return A {@code RemoteBigQueryHelper} object for the provided options. 
+ * @throws BigQueryHelperException if {@code keyStream} is not a valid JSON key stream + */ + public static RemoteBigQueryHelper create(String projectId, InputStream keyStream) + throws BigQueryHelperException { + try { + BigQueryOptions bigqueryOptions = BigQueryOptions.builder() + .authCredentials(AuthCredentials.createForJson(keyStream)) + .projectId(projectId) + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteBigQueryHelper(bigqueryOptions); + } catch (IOException ex) { + if (log.isLoggable(Level.WARNING)) { + log.log(Level.WARNING, ex.getMessage()); + } + throw BigQueryHelperException.translate(ex); + } + } + + /** + * Creates a {@code RemoteBigQueryHelper} object using default project id and authentication + * credentials. + */ + public static RemoteBigQueryHelper create() { + BigQueryOptions bigqueryOptions = BigQueryOptions.builder() + .retryParams(retryParams()) + .connectTimeout(60000) + .readTimeout(60000) + .build(); + return new RemoteBigQueryHelper(bigqueryOptions); + } + + private static RetryParams retryParams() { + return RetryParams.builder() + .retryMaxAttempts(10) + .retryMinAttempts(6) + .maxRetryDelayMillis(30000) + .totalRetryPeriodMillis(120000) + .initialRetryDelayMillis(250) + .build(); + } + + public static class BigQueryHelperException extends RuntimeException { + + private static final long serialVersionUID = 3984993496060055562L; + + public BigQueryHelperException(String message, Throwable cause) { + super(message, cause); + } + + public static BigQueryHelperException translate(Exception ex) { + return new BigQueryHelperException(ex.getMessage(), ex); + } + } +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java new file mode 100644 index 000000000000..9ca792ecd77d --- /dev/null +++ 
b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java @@ -0,0 +1,38 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A testing helper for Google BigQuery. + * + *

A simple usage example: + * + *

Before the test: + *

 {@code
+ * RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
+ * BigQuery bigquery = bigqueryHelper.options().service();
+ * String dataset = RemoteBigQueryHelper.generateDatasetName();
+ * bigquery.create(DatasetInfo.builder(dataset).build());
+ * } 
+ * + *

After the test: + *

 {@code
+ * RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
+ * }
+ * + * @see + * gcloud-java tools for testing + */ +package com.google.gcloud.bigquery.testing; diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java new file mode 100644 index 000000000000..d53ad838b802 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java @@ -0,0 +1,188 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.spi; + +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.QueryResponse; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableRow; +import com.google.gcloud.bigquery.BigQueryException; + +import java.util.Map; + +public interface BigQueryRpc { + + // These options are part of the Google Cloud BigQuery query parameters + enum Option { + FIELDS("fields"), + DELETE_CONTENTS("deleteContents"), + ALL_DATASETS("all"), + ALL_USERS("allUsers"), + MAX_RESULTS("maxResults"), + PAGE_TOKEN("pageToken"), + START_INDEX("startIndex"), + STATE_FILTER("stateFilter"), + TIMEOUT("timeoutMs"); + + private final String value; + + Option(String value) { + this.value = value; + } + + public String value() { + return value; + } + + @SuppressWarnings("unchecked") + T get(Map options) { + return (T) options.get(this); + } + + String getString(Map options) { + return get(options); + } + + Long getLong(Map options) { + return get(options); + } + + Boolean getBoolean(Map options) { + return get(options); + } + } + + class Tuple { + + private final X x; + private final Y y; + + private Tuple(X x, Y y) { + this.x = x; + this.y = y; + } + + public static Tuple of(X x, Y y) { + return new Tuple<>(x, y); + } + + public X x() { + return x; + } + + public Y y() { + return y; + } + } + + /** + * Returns the requested dataset or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Dataset getDataset(String datasetId, Map options) throws BigQueryException; + + /** + * Lists the project's datasets. 
Partial information is returned on a dataset (datasetReference, + * friendlyName and id). To get full information use {@link #getDataset(String, Map)}. + * + * @throws BigQueryException upon failure + */ + Tuple> listDatasets(Map options) throws BigQueryException; + + Dataset create(Dataset dataset, Map options) throws BigQueryException; + + Table create(Table table, Map options) throws BigQueryException; + + Job create(Job job, Map options) throws BigQueryException; + + /** + * Delete the requested dataset. + * + * @return {@code true} if dataset was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean deleteDataset(String datasetId, Map options) throws BigQueryException; + + Dataset patch(Dataset dataset, Map options) throws BigQueryException; + + Table patch(Table table, Map options) throws BigQueryException; + + /** + * Returns the requested table or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Table getTable(String datasetId, String tableId, Map options) throws BigQueryException; + + /** + * Lists the dataset's tables. Partial information is returned on a table (tableReference, + * friendlyName, id and type). To get full information use {@link #getTable(String, String, Map)}. + * + * @throws BigQueryException upon failure + */ + Tuple> listTables(String dataset, Map options) + throws BigQueryException; + + /** + * Delete the requested table. + * + * @return {@code true} if table was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean deleteTable(String datasetId, String tableId) throws BigQueryException; + + TableDataInsertAllResponse insertAll(String datasetId, String tableId, + TableDataInsertAllRequest request) throws BigQueryException; + + Tuple> listTableData(String datasetId, String tableId, + Map options) throws BigQueryException; + + /** + * Returns the requested job or {@code null} if not found. 
+ * + * @throws BigQueryException upon failure + */ + Job getJob(String jobId, Map options) throws BigQueryException; + + /** + * Lists the project's jobs. + * + * @throws BigQueryException upon failure + */ + Tuple> listJobs(Map options) throws BigQueryException; + + /** + * Sends a job cancel request. This call will return immediately, and the client will need to poll + * for the job status to see if the cancel completed successfully. + * + * @return {@code true} if cancel was requested successfully, {@code false} if the job was not + * found + * @throws BigQueryException upon failure + */ + boolean cancel(String jobId) throws BigQueryException; + + GetQueryResultsResponse getQueryResults(String jobId, Map options) + throws BigQueryException; + + QueryResponse query(QueryRequest request) throws BigQueryException; +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java new file mode 100644 index 000000000000..2706868756a5 --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpcFactory.java @@ -0,0 +1,26 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.spi; + +import com.google.gcloud.bigquery.BigQueryOptions; + +/** + * An interface for BigQuery RPC factory. 
+ * Implementation will be loaded via {@link java.util.ServiceLoader}. + */ +public interface BigQueryRpcFactory extends ServiceRpcFactory { +} diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java new file mode 100644 index 000000000000..74d1c038a6bc --- /dev/null +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java @@ -0,0 +1,412 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except + * in compliance with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. 
+ */ + +package com.google.gcloud.spi; + +import static com.google.gcloud.spi.BigQueryRpc.Option.DELETE_CONTENTS; +import static com.google.gcloud.spi.BigQueryRpc.Option.FIELDS; +import static com.google.gcloud.spi.BigQueryRpc.Option.MAX_RESULTS; +import static com.google.gcloud.spi.BigQueryRpc.Option.PAGE_TOKEN; +import static com.google.gcloud.spi.BigQueryRpc.Option.START_INDEX; +import static com.google.gcloud.spi.BigQueryRpc.Option.TIMEOUT; +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; + +import com.google.api.client.googleapis.json.GoogleJsonError; +import com.google.api.client.googleapis.json.GoogleJsonResponseException; +import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.http.HttpTransport; +import com.google.api.client.json.jackson.JacksonFactory; +import com.google.api.services.bigquery.Bigquery; +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.DatasetList; +import com.google.api.services.bigquery.model.DatasetReference; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.JobList; +import com.google.api.services.bigquery.model.JobStatus; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.QueryResponse; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TableList; +import com.google.api.services.bigquery.model.TableReference; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import 
com.google.common.collect.Iterables; + +import com.google.gcloud.bigquery.BigQueryException; +import com.google.gcloud.bigquery.BigQueryOptions; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class DefaultBigQueryRpc implements BigQueryRpc { + + public static final String DEFAULT_PROJECTION = "full"; + // see: https://cloud.google.com/bigquery/troubleshooting-errors + private static final Set RETRYABLE_CODES = ImmutableSet.of(500, 502, 503, 504); + private final BigQueryOptions options; + private final Bigquery bigquery; + + public DefaultBigQueryRpc(BigQueryOptions options) { + HttpTransport transport = options.httpTransportFactory().create(); + HttpRequestInitializer initializer = options.httpRequestInitializer(); + this.options = options; + bigquery = new Bigquery.Builder(transport, new JacksonFactory(), initializer) + .setRootUrl(options.host()) + .setApplicationName(options.applicationName()) + .build(); + } + + private static BigQueryException translate(IOException exception) { + BigQueryException translated; + if (exception instanceof GoogleJsonResponseException + && ((GoogleJsonResponseException) exception).getDetails() != null) { + translated = translate(((GoogleJsonResponseException) exception).getDetails()); + } else { + translated = + new BigQueryException(BigQueryException.UNKNOWN_CODE, exception.getMessage(), false); + } + translated.initCause(exception); + return translated; + } + + private static BigQueryException translate(GoogleJsonError exception) { + boolean retryable = RETRYABLE_CODES.contains(exception.getCode()); + return new BigQueryException(exception.getCode(), exception.getMessage(), retryable); + } + + @Override + public Dataset getDataset(String datasetId, Map options) throws BigQueryException { + try { + return bigquery.datasets() + .get(this.options.projectId(), datasetId) + .setFields(FIELDS.getString(options)) + .execute(); + } catch 
(IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; + } + } + + @Override + public Tuple> listDatasets(Map options) + throws BigQueryException { + try { + DatasetList datasetsList = bigquery.datasets() + .list(this.options.projectId()) + .setAll(Option.ALL_DATASETS.getBoolean(options)) + .setMaxResults(MAX_RESULTS.getLong(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + .execute(); + Iterable datasets = datasetsList.getDatasets(); + return Tuple.of(datasetsList.getNextPageToken(), + Iterables.transform(datasets != null ? datasets + : ImmutableList.of(), + new Function() { + @Override + public Dataset apply(DatasetList.Datasets datasetPb) { + return new Dataset() + .setDatasetReference(datasetPb.getDatasetReference()) + .setFriendlyName(datasetPb.getFriendlyName()) + .setId(datasetPb.getId()) + .setKind(datasetPb.getKind()); + } + })); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Dataset create(Dataset dataset, Map options) throws BigQueryException { + try { + return bigquery.datasets().insert(this.options.projectId(), dataset) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Table create(Table table, Map options) + throws BigQueryException { + try { + // unset the type, as it is output only + table.setType(null); + return bigquery.tables() + .insert(this.options.projectId(), table.getTableReference().getDatasetId(), table) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Job create(Job job, Map options) throws BigQueryException { + try { + return bigquery.jobs() + .insert(this.options.projectId(), job) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + 
public boolean deleteDataset(String datasetId, Map options) throws BigQueryException { + try { + bigquery.datasets().delete(this.options.projectId(), datasetId) + .setDeleteContents(DELETE_CONTENTS.getBoolean(options)) + .execute(); + return true; + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return false; + } + throw serviceException; + } + } + + @Override + public Dataset patch(Dataset dataset, Map options) throws BigQueryException { + try { + DatasetReference reference = dataset.getDatasetReference(); + return bigquery.datasets() + .patch(this.options.projectId(), reference.getDatasetId(), dataset) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Table patch(Table table, Map options) throws BigQueryException { + try { + // unset the type, as it is output only + table.setType(null); + TableReference reference = table.getTableReference(); + return bigquery.tables() + .patch(this.options.projectId(), reference.getDatasetId(), reference.getTableId(), table) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Table getTable(String datasetId, String tableId, Map options) + throws BigQueryException { + try { + return bigquery.tables() + .get(this.options.projectId(), datasetId, tableId) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; + } + } + + @Override + public Tuple> listTables(String datasetId, Map options) + throws BigQueryException { + try { + TableList tableList = bigquery.tables() + .list(this.options.projectId(), datasetId) + .setMaxResults(MAX_RESULTS.getLong(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + 
.execute(); + Iterable tables = tableList.getTables(); + return Tuple.of(tableList.getNextPageToken(), + Iterables.transform(tables != null ? tables : ImmutableList.of(), + new Function() { + @Override + public Table apply(TableList.Tables tablePb) { + return new Table() + .setFriendlyName(tablePb.getFriendlyName()) + .setId(tablePb.getId()) + .setKind(tablePb.getKind()) + .setTableReference(tablePb.getTableReference()) + .setType(tablePb.getType()); + } + })); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public boolean deleteTable(String datasetId, String tableId) throws BigQueryException { + try { + bigquery.tables().delete(this.options.projectId(), datasetId, tableId).execute(); + return true; + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return false; + } + throw serviceException; + } + } + + @Override + public TableDataInsertAllResponse insertAll(String datasetId, String tableId, + TableDataInsertAllRequest request) throws BigQueryException { + try { + return bigquery.tabledata() + .insertAll(this.options.projectId(), datasetId, tableId, request) + .execute(); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Tuple> listTableData(String datasetId, String tableId, + Map options) throws BigQueryException { + try { + TableDataList tableDataList = bigquery.tabledata() + .list(this.options.projectId(), datasetId, tableId) + .setMaxResults(MAX_RESULTS.getLong(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + .setStartIndex(START_INDEX.getLong(options) != null + ? 
BigInteger.valueOf(START_INDEX.getLong(options)) : null) + .execute(); + return Tuple.>of(tableDataList.getPageToken(), + tableDataList.getRows()); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public Job getJob(String jobId, Map options) throws BigQueryException { + try { + return bigquery.jobs() + .get(this.options.projectId(), jobId) + .setFields(FIELDS.getString(options)) + .execute(); + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; + } + } + + @Override + public Tuple> listJobs(Map options) throws BigQueryException { + try { + JobList jobsList = bigquery.jobs() + .list(this.options.projectId()) + .setAllUsers(Option.ALL_USERS.getBoolean(options)) + .setFields(Option.FIELDS.getString(options)) + .setStateFilter(Option.STATE_FILTER.>get(options)) + .setMaxResults(MAX_RESULTS.getLong(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + .setProjection(DEFAULT_PROJECTION) + .execute(); + Iterable jobs = jobsList.getJobs(); + return Tuple.of(jobsList.getNextPageToken(), + Iterables.transform(jobs != null ? jobs : ImmutableList.of(), + new Function() { + @Override + public Job apply(JobList.Jobs jobPb) { + JobStatus statusPb = jobPb.getStatus() != null + ? 
jobPb.getStatus() : new JobStatus(); + if (statusPb.getState() == null) { + statusPb.setState(jobPb.getState()); + } + if (statusPb.getErrorResult() == null) { + statusPb.setErrorResult(jobPb.getErrorResult()); + } + return new Job() + .setConfiguration(jobPb.getConfiguration()) + .setId(jobPb.getId()) + .setJobReference(jobPb.getJobReference()) + .setKind(jobPb.getKind()) + .setStatistics(jobPb.getStatistics()) + .setStatus(statusPb) + .setUserEmail(jobPb.getUserEmail()); + } + })); + } catch (IOException ex) { + throw translate(ex); + } + } + + @Override + public boolean cancel(String jobId) throws BigQueryException { + try { + bigquery.jobs().cancel(this.options.projectId(), jobId).execute(); + return true; + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return false; + } + throw serviceException; + } + } + + @Override + public GetQueryResultsResponse getQueryResults(String jobId, Map options) + throws BigQueryException { + try { + return bigquery.jobs().getQueryResults(this.options.projectId(), jobId) + .setMaxResults(MAX_RESULTS.getLong(options)) + .setPageToken(PAGE_TOKEN.getString(options)) + .setStartIndex(START_INDEX.getLong(options) != null + ? 
BigInteger.valueOf(START_INDEX.getLong(options)) : null) + .setTimeoutMs(TIMEOUT.getLong(options)) + .execute(); + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.code() == HTTP_NOT_FOUND) { + return null; + } + throw serviceException; + } + } + + @Override + public QueryResponse query(QueryRequest request) throws BigQueryException { + try { + return bigquery.jobs().query(this.options.projectId(), request).execute(); + } catch (IOException ex) { + throw translate(ex); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java new file mode 100644 index 000000000000..52159b0665ac --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/AclTest.java @@ -0,0 +1,95 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.api.services.bigquery.model.Dataset; +import com.google.gcloud.bigquery.Acl.Domain; +import com.google.gcloud.bigquery.Acl.Entity; +import com.google.gcloud.bigquery.Acl.Entity.Type; +import com.google.gcloud.bigquery.Acl.Group; +import com.google.gcloud.bigquery.Acl.Role; +import com.google.gcloud.bigquery.Acl.User; +import com.google.gcloud.bigquery.Acl.View; + +import org.junit.Test; + +public class AclTest { + + @Test + public void testDomainEntity() { + Domain entity = new Domain("d1"); + assertEquals("d1", entity.domain()); + assertEquals(Type.DOMAIN, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testGroupEntity() { + Group entity = new Group("g1"); + assertEquals("g1", entity.identifier()); + assertEquals(Type.GROUP, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testSpecialGroupEntity() { + Group entity = Group.ofAllAuthenticatedUsers(); + assertEquals("allAuthenticatedUsers", entity.identifier()); + entity = Group.ofProjectWriters(); + assertEquals("projectWriters", entity.identifier()); + entity = Group.ofProjectReaders(); + assertEquals("projectReaders", entity.identifier()); + entity = Group.ofProjectOwners(); + assertEquals("projectOwners", entity.identifier()); + } + + @Test + public void testUserEntity() { + User entity = new User("u1"); + assertEquals("u1", entity.email()); + assertEquals(Type.USER, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + public void testViewEntity() { + TableId viewId = TableId.of("project", "dataset", "view"); + View entity = new View(viewId); + assertEquals(viewId, entity.id()); + assertEquals(Type.VIEW, entity.type()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, 
Entity.fromPb(pb)); + } + + @Test + public void testAcl() { + Acl acl = new Acl(Group.ofAllAuthenticatedUsers(), Role.READER); + assertEquals(Group.ofAllAuthenticatedUsers(), acl.entity()); + assertEquals(Role.READER, acl.role()); + Dataset.Access pb = acl.toPb(); + assertEquals(acl, Acl.fromPb(pb)); + View view = new View(TableId.of("project", "dataset", "view")); + acl = new Acl(view); + assertEquals(view, acl.entity()); + assertEquals(null, acl.role()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java new file mode 100644 index 000000000000..c8de039e233f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryErrorTest.java @@ -0,0 +1,45 @@ +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class BigQueryErrorTest { + + private static final String REASON = "reason"; + private static final String LOCATION = "location"; + private static final String DEBUG_INFO = "debugInfo"; + private static final String MESSAGE = "message"; + private static final BigQueryError ERROR = + new BigQueryError(REASON, LOCATION, MESSAGE, DEBUG_INFO); + private static final BigQueryError ERROR_INCOMPLETE = + new BigQueryError(REASON, LOCATION, MESSAGE); + + @Test + public void testConstructor() { + assertEquals(REASON, ERROR.reason()); + assertEquals(LOCATION, ERROR.location()); + assertEquals(DEBUG_INFO, ERROR.debugInfo()); + assertEquals(MESSAGE, ERROR.message()); + assertEquals(REASON, ERROR_INCOMPLETE.reason()); + assertEquals(LOCATION, ERROR_INCOMPLETE.location()); + assertEquals(null, ERROR_INCOMPLETE.debugInfo()); + assertEquals(MESSAGE, ERROR_INCOMPLETE.message()); + } + + @Test + public void testToAndFromPb() { + compareBigQueryError(ERROR, BigQueryError.fromPb(ERROR.toPb())); + compareBigQueryError(ERROR_INCOMPLETE, 
BigQueryError.fromPb(ERROR_INCOMPLETE.toPb())); + } + + private void compareBigQueryError(BigQueryError expected, BigQueryError value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.reason(), value.reason()); + assertEquals(expected.location(), value.location()); + assertEquals(expected.debugInfo(), value.debugInfo()); + assertEquals(expected.message(), value.message()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java new file mode 100644 index 000000000000..bcc946f65006 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java @@ -0,0 +1,1041 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.eq; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; + +import com.google.api.services.bigquery.model.Dataset; +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.Job; +import com.google.api.services.bigquery.model.Table; +import com.google.api.services.bigquery.model.TableCell; +import com.google.api.services.bigquery.model.TableDataInsertAllRequest; +import com.google.api.services.bigquery.model.TableDataInsertAllResponse; +import com.google.api.services.bigquery.model.TableRow; +import com.google.common.base.Function; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.gcloud.Page; +import com.google.gcloud.RetryParams; +import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert; +import com.google.gcloud.spi.BigQueryRpc; +import com.google.gcloud.spi.BigQueryRpc.Tuple; +import com.google.gcloud.spi.BigQueryRpcFactory; + +import org.easymock.Capture; +import org.easymock.EasyMock; +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.math.BigInteger; +import java.util.List; +import java.util.Map; + +public class BigQueryImplTest { + + private static final String PROJECT = "project"; + private static final String DATASET = "dataset"; + private static final String TABLE = "table"; + private static final String JOB = "job"; + private static final String OTHER_TABLE = 
"otherTable"; + private static final String OTHER_DATASET = "otherDataset"; + private static final List ACCESS_RULES = ImmutableList.of( + new Acl(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), + new Acl(new Acl.View(TableId.of("dataset", "table")), Acl.Role.WRITER)); + private static final List ACCESS_RULES_WITH_PROJECT = ImmutableList.of( + new Acl(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), + new Acl(new Acl.View(TableId.of(PROJECT, "dataset", "table")))); + private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET) + .acl(ACCESS_RULES) + .description("description") + .build(); + private static final DatasetInfo DATASET_INFO_WITH_PROJECT = DatasetInfo.builder(PROJECT, DATASET) + .acl(ACCESS_RULES_WITH_PROJECT) + .description("description") + .build(); + private static final DatasetInfo OTHER_DATASET_INFO = DatasetInfo.builder(PROJECT, OTHER_DATASET) + .acl(ACCESS_RULES) + .description("other description") + .build(); + private static final TableId TABLE_ID = TableId.of(DATASET, TABLE); + private static final TableId OTHER_TABLE_ID = TableId.of(PROJECT, DATASET, OTHER_TABLE); + private static final TableId TABLE_ID_WITH_PROJECT = TableId.of(PROJECT, DATASET, TABLE); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_SCHEMA); + private static final TableInfo 
OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_SCHEMA); + private static final TableInfo TABLE_INFO_WITH_PROJECT = + TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_SCHEMA); + private static final LoadJobInfo LOAD_JOB = LoadJobInfo.of(TABLE_ID, "URI"); + private static final LoadJobInfo LOAD_JOB_WITH_PROJECT = + LoadJobInfo.of(TABLE_ID_WITH_PROJECT, "URI"); + private static final LoadJobInfo COMPLETE_LOAD_JOB = + LoadJobInfo.builder(TABLE_ID_WITH_PROJECT, "URI") + .jobId(JobId.of(PROJECT, JOB)) + .build(); + private static final CopyJobInfo COPY_JOB = + CopyJobInfo.of(TABLE_ID, ImmutableList.of(TABLE_ID, TABLE_ID)); + private static final CopyJobInfo COPY_JOB_WITH_PROJECT = + CopyJobInfo.of(TABLE_ID_WITH_PROJECT, ImmutableList.of(TABLE_ID_WITH_PROJECT, + TABLE_ID_WITH_PROJECT)); + private static final CopyJobInfo COMPLETE_COPY_JOB = + CopyJobInfo.builder(TABLE_ID_WITH_PROJECT, ImmutableList.of(TABLE_ID_WITH_PROJECT, + TABLE_ID_WITH_PROJECT)) + .jobId(JobId.of(PROJECT, JOB)) + .build(); + private static final QueryJobInfo QUERY_JOB = QueryJobInfo.builder("SQL") + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(TABLE_ID) + .build(); + private static final QueryJobInfo QUERY_JOB_WITH_PROJECT = QueryJobInfo.builder("SQL") + .defaultDataset(DatasetId.of(PROJECT, DATASET)) + .destinationTable(TABLE_ID_WITH_PROJECT) + .build(); + private static final QueryJobInfo COMPLETE_QUERY_JOB = QueryJobInfo.builder("SQL") + .defaultDataset(DatasetId.of(PROJECT, DATASET)).destinationTable(TABLE_ID_WITH_PROJECT) + .jobId(JobId.of(PROJECT, JOB)) + .build(); + private static final ExtractJobInfo EXTRACT_JOB = ExtractJobInfo.of(TABLE_ID, "URI"); + private static final ExtractJobInfo EXTRACT_JOB_WITH_PROJECT = + ExtractJobInfo.of(TABLE_ID_WITH_PROJECT, "URI"); + private static final ExtractJobInfo COMPLETE_EXTRACT_JOB = + ExtractJobInfo.builder(TABLE_ID_WITH_PROJECT, "URI") + .jobId(JobId.of(PROJECT, JOB)) + .build(); + private static final TableCell BOOLEAN_FIELD = new 
TableCell().setV("false"); + private static final TableCell INTEGER_FIELD = new TableCell().setV("1"); + private static final TableRow TABLE_ROW = + new TableRow().setF(ImmutableList.of(BOOLEAN_FIELD, INTEGER_FIELD)); + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("SQL") + .maxResults(42L) + .useQueryCache(false) + .defaultDataset(DatasetId.of(DATASET)) + .build(); + private static final QueryRequest QUERY_REQUEST_WITH_PROJECT = QueryRequest.builder("SQL") + .maxResults(42L) + .useQueryCache(false) + .defaultDataset(DatasetId.of(PROJECT, DATASET)) + .build(); + + // Empty BigQueryRpc options + private static final Map EMPTY_RPC_OPTIONS = ImmutableMap.of(); + + // Dataset options + private static final BigQuery.DatasetOption DATASET_OPTION_FIELDS = + BigQuery.DatasetOption.fields(BigQuery.DatasetField.ACCESS, BigQuery.DatasetField.ETAG); + + // Dataset list options + private static final BigQuery.DatasetListOption DATASET_LIST_ALL = + BigQuery.DatasetListOption.all(); + private static final BigQuery.DatasetListOption DATASET_LIST_PAGE_TOKEN = + BigQuery.DatasetListOption.startPageToken("cursor"); + private static final BigQuery.DatasetListOption DATASET_LIST_MAX_RESULTS = + BigQuery.DatasetListOption.maxResults(42L); + private static final Map DATASET_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.ALL_DATASETS, true, + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.MAX_RESULTS, 42L); + + // Dataset delete options + private static final BigQuery.DatasetDeleteOption DATASET_DELETE_CONTENTS = + BigQuery.DatasetDeleteOption.deleteContents(); + private static final Map DATASET_DELETE_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.DELETE_CONTENTS, true); + + // Table options + private static final BigQuery.TableOption TABLE_OPTION_FIELDS = + BigQuery.TableOption.fields(BigQuery.TableField.SCHEMA, BigQuery.TableField.ETAG); + + // Table list options + private static final BigQuery.TableListOption TABLE_LIST_MAX_RESULTS = + 
BigQuery.TableListOption.maxResults(42L); + private static final BigQuery.TableListOption TABLE_LIST_PAGE_TOKEN = + BigQuery.TableListOption.startPageToken("cursor"); + private static final Map TABLE_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.MAX_RESULTS, 42L, + BigQueryRpc.Option.PAGE_TOKEN, "cursor"); + + // TableData list options + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_MAX_RESULTS = + BigQuery.TableDataListOption.maxResults(42L); + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_PAGE_TOKEN = + BigQuery.TableDataListOption.startPageToken("cursor"); + private static final BigQuery.TableDataListOption TABLE_DATA_LIST_START_INDEX = + BigQuery.TableDataListOption.startIndex(0L); + private static final Map TABLE_DATA_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.MAX_RESULTS, 42L, + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.START_INDEX, 0L); + + // Job options + private static final BigQuery.JobOption JOB_OPTION_FIELDS = + BigQuery.JobOption.fields(BigQuery.JobField.USER_EMAIL); + + // Job list options + private static final BigQuery.JobListOption JOB_LIST_OPTION_FIELD = + BigQuery.JobListOption.fields(BigQuery.JobField.STATISTICS); + private static final BigQuery.JobListOption JOB_LIST_ALL_USERS = + BigQuery.JobListOption.allUsers(); + private static final BigQuery.JobListOption JOB_LIST_STATE_FILTER = + BigQuery.JobListOption.stateFilter(JobStatus.State.DONE, JobStatus.State.PENDING); + private static final BigQuery.JobListOption JOB_LIST_PAGE_TOKEN = + BigQuery.JobListOption.startPageToken("cursor"); + private static final BigQuery.JobListOption JOB_LIST_MAX_RESULTS = + BigQuery.JobListOption.maxResults(42L); + private static final Map JOB_LIST_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.ALL_USERS, true, + BigQueryRpc.Option.STATE_FILTER, ImmutableList.of("done", "pending"), + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.MAX_RESULTS, 42L); + + // Query Results 
options + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_TIME = + BigQuery.QueryResultsOption.maxWaitTime(42L); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_INDEX = + BigQuery.QueryResultsOption.startIndex(1024L); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_PAGE_TOKEN = + BigQuery.QueryResultsOption.startPageToken("cursor"); + private static final BigQuery.QueryResultsOption QUERY_RESULTS_OPTION_MAX_RESULTS = + BigQuery.QueryResultsOption.maxResults(0L); + private static final Map QUERY_RESULTS_OPTIONS = ImmutableMap.of( + BigQueryRpc.Option.TIMEOUT, 42L, + BigQueryRpc.Option.START_INDEX, 1024L, + BigQueryRpc.Option.PAGE_TOKEN, "cursor", + BigQueryRpc.Option.MAX_RESULTS, 0L); + + private BigQueryOptions options; + private BigQueryRpcFactory rpcFactoryMock; + private BigQueryRpc bigqueryRpcMock; + private BigQuery bigquery; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setUp() { + rpcFactoryMock = EasyMock.createMock(BigQueryRpcFactory.class); + bigqueryRpcMock = EasyMock.createMock(BigQueryRpc.class); + EasyMock.expect(rpcFactoryMock.create(EasyMock.anyObject(BigQueryOptions.class))) + .andReturn(bigqueryRpcMock); + EasyMock.replay(rpcFactoryMock); + options = BigQueryOptions.builder() + .projectId(PROJECT) + .serviceRpcFactory(rpcFactoryMock) + .build(); + } + + @After + public void tearDown() { + EasyMock.verify(rpcFactoryMock, bigqueryRpcMock); + } + + @Test + public void testGetOptions() { + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertSame(options, bigquery.options()); + } + + @Test + public void testCreateDataset() { + EasyMock.expect(bigqueryRpcMock.create(DATASET_INFO_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.create(DATASET_INFO); + 
assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testCreateDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect( + bigqueryRpcMock.create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capture(capturedOptions))) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testGetDataset() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.getDataset(DATASET); + assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testGetDatasetFromDatasetId() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.getDataset(DatasetId.of(PROJECT, DATASET)); + assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testGetDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.getDataset(eq(DATASET), capture(capturedOptions))) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS); + String selector = (String) 
capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testListDatasets() { + String cursor = "cursor"; + ImmutableList datasetList = ImmutableList.of(DATASET_INFO_WITH_PROJECT, + OTHER_DATASET_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listDatasets(); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class)); + } + + @Test + public void testListEmptyDatasets() { + ImmutableList datasets = ImmutableList.of(); + Tuple> result = Tuple.>of(null, datasets); + EasyMock.expect(bigqueryRpcMock.listDatasets(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listDatasets(); + assertNull(page.nextPageCursor()); + assertArrayEquals(ImmutableList.of().toArray(), + Iterables.toArray(page.values(), DatasetInfo.class)); + } + + @Test + public void testListDatasetsWithOptions() { + String cursor = "cursor"; + ImmutableList datasetList = ImmutableList.of(DATASET_INFO_WITH_PROJECT, + OTHER_DATASET_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listDatasets(DATASET_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN, + DATASET_LIST_MAX_RESULTS); + assertEquals(cursor, page.nextPageCursor()); + 
assertArrayEquals(datasetList.toArray(), Iterables.toArray(page.values(), DatasetInfo.class)); + } + + @Test + public void testDeleteDataset() { + EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, EMPTY_RPC_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET)); + } + + @Test + public void testDeleteDatasetFromDatasetId() { + EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, EMPTY_RPC_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DatasetId.of(PROJECT, DATASET))); + } + + @Test + public void testDeleteDatasetWithOptions() { + EasyMock.expect(bigqueryRpcMock.deleteDataset(DATASET, DATASET_DELETE_OPTIONS)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET, DATASET_DELETE_CONTENTS)); + } + + @Test + public void testUpdateDataset() { + DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().description("newDescription").build(); + DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(updatedDatasetInfoWithProject.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedDatasetInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.update(updatedDatasetInfo); + assertEquals(updatedDatasetInfoWithProject, dataset); + } + + @Test + public void testUpdateDatasetWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().description("newDescription").build(); + DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect( + bigqueryRpcMock.patch(eq(updatedDatasetInfoWithProject.toPb()), 
capture(capturedOptions))) + .andReturn(updatedDatasetInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + DatasetInfo dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(DATASET_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("datasetReference")); + assertTrue(selector.contains("access")); + assertTrue(selector.contains("etag")); + assertEquals(28, selector.length()); + assertEquals(updatedDatasetInfoWithProject, dataset); + } + + @Test + public void testCreateTable() { + EasyMock.expect(bigqueryRpcMock.create(TABLE_INFO_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.create(TABLE_INFO); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testCreateTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect( + bigqueryRpcMock.create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capture(capturedOptions))) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testGetTable() { + EasyMock.expect(bigqueryRpcMock.getTable(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.getTable(DATASET, TABLE); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public 
void testGetTableFromTableId() { + EasyMock.expect(bigqueryRpcMock.getTable(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.getTable(TABLE_ID); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testGetTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect(bigqueryRpcMock.getTable(eq(DATASET), eq(TABLE), capture(capturedOptions))) + .andReturn(TABLE_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(TABLE_INFO_WITH_PROJECT, table); + } + + @Test + public void testListTables() { + String cursor = "cursor"; + ImmutableList tableList = + ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listTables(DATASET); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), BaseTableInfo.class)); + } + + @Test + public void testListTablesFromDatasetId() { + String cursor = "cursor"; + ImmutableList tableList = + ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, 
EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listTables(DatasetId.of(PROJECT, DATASET)); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), BaseTableInfo.class)); + } + + @Test + public void testListTablesWithOptions() { + String cursor = "cursor"; + ImmutableList tableList = + ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO); + Tuple> result = + Tuple.of(cursor, Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION)); + EasyMock.expect(bigqueryRpcMock.listTables(DATASET, TABLE_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listTables(DATASET, TABLE_LIST_MAX_RESULTS, + TABLE_LIST_PAGE_TOKEN); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.values(), BaseTableInfo.class)); + } + + @Test + public void testDeleteTable() { + EasyMock.expect(bigqueryRpcMock.deleteTable(DATASET, TABLE)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(DATASET, TABLE)); + } + + @Test + public void testDeleteTableFromTableId() { + EasyMock.expect(bigqueryRpcMock.deleteTable(DATASET, TABLE)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.delete(TABLE_ID)); + } + + @Test + public void testUpdateTable() { + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build(); + TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(updatedTableInfoWithProject.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(updatedTableInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = 
bigquery.update(updatedTableInfo); + assertEquals(updatedTableInfoWithProject, table); + } + + @Test + public void testUpdateTableWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build(); + TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder() + .description("newDescription") + .build(); + EasyMock.expect(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()), + capture(capturedOptions))).andReturn(updatedTableInfoWithProject.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + TableInfo table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); + String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("tableReference")); + assertTrue(selector.contains("schema")); + assertTrue(selector.contains("etag")); + assertEquals(31, selector.length()); + assertEquals(updatedTableInfoWithProject, table); + } + + @Test + public void testInsertAll() { + Map row1 = ImmutableMap.of("field", "value1"); + Map row2 = ImmutableMap.of("field", "value2"); + List rows = ImmutableList.of( + new RowToInsert("row1", row1), + new RowToInsert("row2", row2) + ); + InsertAllRequest request = InsertAllRequest.builder(TABLE_ID) + .rows(rows) + .skipInvalidRows(false) + .ignoreUnknownValues(true) + .build(); + TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest().setRows( + Lists.transform(rows, new Function() { + @Override + public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { + return new TableDataInsertAllRequest.Rows().setInsertId(rowToInsert.id()) + .setJson(rowToInsert.content()); + } + }) + ).setSkipInvalidRows(false).setIgnoreUnknownValues(true); + TableDataInsertAllResponse responsePb = new TableDataInsertAllResponse().setInsertErrors( + ImmutableList.of(new TableDataInsertAllResponse.InsertErrors().setIndex(0L).setErrors( 
+ ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); + EasyMock.expect(bigqueryRpcMock.insertAll(DATASET, TABLE, requestPb)) + .andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + InsertAllResponse response = bigquery.insertAll(request); + assertNotNull(response.errorsFor(0L)); + assertNull(response.errorsFor(1L)); + assertEquals(1, response.errorsFor(0L).size()); + assertEquals("ErrorMessage", response.errorsFor(0L).get(0).message()); + } + + @Test + public void testListTableData() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(DATASET, TABLE); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testListTableDataFromTableId() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell 
cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, EMPTY_RPC_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page> page = bigquery.listTableData(TableId.of(DATASET, TABLE)); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testListTableDataWithOptions() { + String cursor = "cursor"; + com.google.api.services.bigquery.model.TableCell cell1 = + new com.google.api.services.bigquery.model.TableCell().setV("Value1"); + com.google.api.services.bigquery.model.TableCell cell2 = + new com.google.api.services.bigquery.model.TableCell().setV("Value2"); + ImmutableList> tableData = ImmutableList.of( + (List) ImmutableList.of(FieldValue.fromPb(cell1)), + ImmutableList.of(FieldValue.fromPb(cell2))); + Tuple> result = + Tuple.>of(cursor, + ImmutableList.of( + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value1"))), + new com.google.api.services.bigquery.model.TableRow().setF( + ImmutableList.of(new com.google.api.services.bigquery.model.TableCell() + .setV("Value2"))))); + EasyMock.expect(bigqueryRpcMock.listTableData(DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) + .andReturn(result); + EasyMock.replay(bigqueryRpcMock); 
+ bigquery = options.service(); + Page> page = bigquery.listTableData(DATASET, TABLE, + TABLE_DATA_LIST_MAX_RESULTS, TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(tableData.toArray(), Iterables.toArray(page.values(), List.class)); + } + + @Test + public void testCreateQueryJob() { + EasyMock.expect(bigqueryRpcMock.create(QUERY_JOB_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_QUERY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryJobInfo job = bigquery.create(QUERY_JOB); + assertEquals(COMPLETE_QUERY_JOB, job); + } + + @Test + public void testCreateLoadJob() { + EasyMock.expect(bigqueryRpcMock.create(LOAD_JOB_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_LOAD_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + LoadJobInfo job = bigquery.create(LOAD_JOB); + assertEquals(COMPLETE_LOAD_JOB, job); + } + + @Test + public void testCreateCopyJob() { + EasyMock.expect(bigqueryRpcMock.create(COPY_JOB_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + CopyJobInfo job = bigquery.create(COPY_JOB); + assertEquals(COMPLETE_COPY_JOB, job); + } + + @Test + public void testCreateExtractJob() { + EasyMock.expect(bigqueryRpcMock.create(EXTRACT_JOB_WITH_PROJECT.toPb(), EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_EXTRACT_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + ExtractJobInfo job = bigquery.create(EXTRACT_JOB); + assertEquals(COMPLETE_EXTRACT_JOB, job); + } + + @Test + public void testCreateJobWithSelectedFields() { + Capture> capturedOptions = Capture.newInstance(); + EasyMock.expect( + bigqueryRpcMock.create(eq(QUERY_JOB_WITH_PROJECT.toPb()), capture(capturedOptions))) + .andReturn(COMPLETE_QUERY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = 
options.service(); + QueryJobInfo job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS); + assertEquals(COMPLETE_QUERY_JOB, job); + String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("jobReference")); + assertTrue(selector.contains("configuration")); + assertTrue(selector.contains("user_email")); + assertEquals(37, selector.length()); + } + + @Test + public void testGetJob() { + EasyMock.expect(bigqueryRpcMock.getJob(JOB, EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + CopyJobInfo job = bigquery.getJob(JOB); + assertEquals(COMPLETE_COPY_JOB, job); + } + + @Test + public void testGetJobFromJobId() { + EasyMock.expect(bigqueryRpcMock.getJob(JOB, EMPTY_RPC_OPTIONS)) + .andReturn(COMPLETE_COPY_JOB.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + CopyJobInfo job = bigquery.getJob(JobId.of(PROJECT, JOB)); + assertEquals(COMPLETE_COPY_JOB, job); + } + + @Test + public void testListJobs() { + String cursor = "cursor"; + ImmutableList jobList = + ImmutableList.of(QUERY_JOB_WITH_PROJECT, LOAD_JOB_WITH_PROJECT); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, new Function() { + @Override + public Job apply(JobInfo jobInfo) { + return jobInfo.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(EMPTY_RPC_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listJobs(); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + } + + @Test + public void testListJobsWithOptions() { + String cursor = "cursor"; + ImmutableList jobList = + ImmutableList.of(QUERY_JOB_WITH_PROJECT, LOAD_JOB_WITH_PROJECT); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, new Function() { + @Override + public Job apply(JobInfo jobInfo) { + 
return jobInfo.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(JOB_LIST_OPTIONS)).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listJobs(JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, + JOB_LIST_PAGE_TOKEN, JOB_LIST_MAX_RESULTS); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + } + + @Test + public void testListJobsWithSelectedFields() { + String cursor = "cursor"; + Capture> capturedOptions = Capture.newInstance(); + ImmutableList jobList = + ImmutableList.of(QUERY_JOB_WITH_PROJECT, LOAD_JOB_WITH_PROJECT); + Tuple> result = + Tuple.of(cursor, Iterables.transform(jobList, new Function() { + @Override + public Job apply(JobInfo jobInfo) { + return jobInfo.toPb(); + } + })); + EasyMock.expect(bigqueryRpcMock.listJobs(capture(capturedOptions))).andReturn(result); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); + assertEquals(cursor, page.nextPageCursor()); + assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class)); + String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption()); + assertTrue(selector.contains("etag,jobs(")); + assertTrue(selector.contains("configuration")); + assertTrue(selector.contains("jobReference")); + assertTrue(selector.contains("statistics")); + assertTrue(selector.contains("state,errorResult),nextPageToken")); + assertEquals(80, selector.length()); + } + + @Test + public void testCancelJob() { + EasyMock.expect(bigqueryRpcMock.cancel(JOB)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + assertTrue(bigquery.cancel(JOB)); + } + + @Test + public void testCancelJobFromJobId() { + EasyMock.expect(bigqueryRpcMock.cancel(JOB)).andReturn(true); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + 
assertTrue(bigquery.cancel(JobId.of(PROJECT, JOB))); + } + + @Test + public void testQueryRequest() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setJobReference(queryJob.toPb()) + .setJobComplete(false); + EasyMock.expect(bigqueryRpcMock.query(QUERY_REQUEST_WITH_PROJECT.toPb())).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.query(QUERY_REQUEST); + assertNull(response.etag()); + assertNull(response.result()); + assertEquals(queryJob, response.jobId()); + assertEquals(false, response.jobComplete()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result()); + } + + @Test + public void testQueryRequestCompleted() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.query(QUERY_REQUEST_WITH_PROJECT.toPb())).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.query(QUERY_REQUEST); + assertNull(response.etag()); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobComplete()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : 
response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testGetQueryResults() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.GetQueryResultsResponse responsePb = + new com.google.api.services.bigquery.model.GetQueryResultsResponse() + .setEtag("etag") + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.getQueryResults(JOB, EMPTY_RPC_OPTIONS)).andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.getQueryResults(queryJob); + assertEquals("etag", response.etag()); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobComplete()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testGetQueryResultsWithOptions() { + JobId queryJob = JobId.of(PROJECT, JOB); + com.google.api.services.bigquery.model.GetQueryResultsResponse responsePb = + new com.google.api.services.bigquery.model.GetQueryResultsResponse() + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken("cursor") + .setTotalBytesProcessed(42L) + 
.setTotalRows(BigInteger.valueOf(1L)); + EasyMock.expect(bigqueryRpcMock.getQueryResults(JOB, QUERY_RESULTS_OPTIONS)) + .andReturn(responsePb); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.service(); + QueryResponse response = bigquery.getQueryResults(queryJob, QUERY_RESULTS_OPTION_TIME, + QUERY_RESULTS_OPTION_INDEX, QUERY_RESULTS_OPTION_MAX_RESULTS, + QUERY_RESULTS_OPTION_PAGE_TOKEN); + assertEquals(queryJob, response.jobId()); + assertEquals(true, response.jobComplete()); + assertEquals(false, response.result().cacheHit()); + assertEquals(ImmutableList.of(), response.executionErrors()); + assertFalse(response.hasErrors()); + assertEquals(null, response.result().schema()); + assertEquals(42L, response.result().totalBytesProcessed()); + assertEquals(1L, response.result().totalRows()); + for (List row : response.result().values()) { + assertEquals(false, row.get(0).booleanValue()); + assertEquals(1L, row.get(1).longValue()); + } + assertEquals("cursor", response.result().nextPageCursor()); + } + + @Test + public void testRetryableException() { + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new BigQueryException(500, "InternalError", true)) + .andReturn(DATASET_INFO_WITH_PROJECT.toPb()); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + DatasetInfo dataset = bigquery.getDataset(DATASET); + assertEquals(DATASET_INFO_WITH_PROJECT, dataset); + } + + @Test + public void testNonRetryableException() { + String exceptionMessage = "Not Implemented"; + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new BigQueryException(501, exceptionMessage, false)); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + thrown.expect(BigQueryException.class); + thrown.expectMessage(exceptionMessage); + bigquery.getDataset(DatasetId.of(DATASET)); 
+ } + + @Test + public void testRuntimeException() { + String exceptionMessage = "Artificial runtime exception"; + EasyMock.expect(bigqueryRpcMock.getDataset(DATASET, EMPTY_RPC_OPTIONS)) + .andThrow(new RuntimeException(exceptionMessage)); + EasyMock.replay(bigqueryRpcMock); + bigquery = options.toBuilder().retryParams(RetryParams.defaultInstance()).build().service(); + thrown.expect(BigQueryException.class); + thrown.expectMessage(exceptionMessage); + bigquery.getDataset(DATASET); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobInfoTest.java new file mode 100644 index 000000000000..81da59644cf0 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CopyJobInfoTest.java @@ -0,0 +1,174 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; + +import org.junit.Test; + +import java.util.List; + +public class CopyJobInfoTest { + + private static final String ETAG = "etag"; + private static final String ID = "id"; + private static final String SELF_LINK = "selfLink"; + private static final String EMAIL = "email"; + private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable"); + private static final List SOURCE_TABLES = ImmutableList.of( + TableId.of("dataset", "sourceTable1"), + TableId.of("dataset", "sourceTable2") + ); + private static final TableId DESTINATION_TABLE = TableId.of("dataset", "destinationTable"); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final JobId JOB_ID = JobId.of("job"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE); + private static final JobStatistics JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final CopyJobInfo COPY_JOB = + CopyJobInfo.builder(DESTINATION_TABLE, SOURCE_TABLE) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .jobId(JOB_ID) + .status(JOB_STATUS) + .createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .statistics(JOB_STATISTICS) + .build(); + private static final CopyJobInfo COPY_JOB_INFO_MULTIPLE_TABLES = + CopyJobInfo.builder(DESTINATION_TABLE, SOURCE_TABLES) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .jobId(JOB_ID) + .status(JOB_STATUS) + 
.createDisposition(CREATE_DISPOSITION) + .writeDisposition(WRITE_DISPOSITION) + .build(); + + @Test + public void testToBuilder() { + compareCopyJobInfo(COPY_JOB, COPY_JOB.toBuilder().build()); + compareCopyJobInfo(COPY_JOB_INFO_MULTIPLE_TABLES, + COPY_JOB_INFO_MULTIPLE_TABLES.toBuilder().build()); + CopyJobInfo job = COPY_JOB.toBuilder() + .destinationTable(TableId.of("dataset", "newTable")) + .build(); + assertEquals("newTable", job.destinationTable().table()); + job = job.toBuilder().destinationTable(DESTINATION_TABLE).build(); + compareCopyJobInfo(COPY_JOB, job); + } + + @Test + public void testOf() { + CopyJobInfo job = CopyJobInfo.of(DESTINATION_TABLE, SOURCE_TABLES); + assertEquals(DESTINATION_TABLE, job.destinationTable()); + assertEquals(SOURCE_TABLES, job.sourceTables()); + job = CopyJobInfo.of(DESTINATION_TABLE, SOURCE_TABLE); + assertEquals(DESTINATION_TABLE, job.destinationTable()); + assertEquals(ImmutableList.of(SOURCE_TABLE), job.sourceTables()); + job = CopyJobInfo.of(JOB_ID, DESTINATION_TABLE, SOURCE_TABLES); + assertEquals(JOB_ID, job.jobId()); + assertEquals(DESTINATION_TABLE, job.destinationTable()); + assertEquals(SOURCE_TABLES, job.sourceTables()); + job = CopyJobInfo.of(JOB_ID, DESTINATION_TABLE, SOURCE_TABLE); + assertEquals(JOB_ID, job.jobId()); + assertEquals(DESTINATION_TABLE, job.destinationTable()); + assertEquals(ImmutableList.of(SOURCE_TABLE), job.sourceTables()); + } + + @Test + public void testToBuilderIncomplete() { + CopyJobInfo job = CopyJobInfo.of(DESTINATION_TABLE, SOURCE_TABLES); + compareCopyJobInfo(job, job.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(ETAG, COPY_JOB_INFO_MULTIPLE_TABLES.etag()); + assertEquals(ID, COPY_JOB_INFO_MULTIPLE_TABLES.id()); + assertEquals(SELF_LINK, COPY_JOB_INFO_MULTIPLE_TABLES.selfLink()); + assertEquals(EMAIL, COPY_JOB_INFO_MULTIPLE_TABLES.userEmail()); + assertEquals(JOB_ID, COPY_JOB_INFO_MULTIPLE_TABLES.jobId()); + assertEquals(JOB_STATUS, 
COPY_JOB_INFO_MULTIPLE_TABLES.status()); + assertEquals(DESTINATION_TABLE, COPY_JOB_INFO_MULTIPLE_TABLES.destinationTable()); + assertEquals(SOURCE_TABLES, COPY_JOB_INFO_MULTIPLE_TABLES.sourceTables()); + assertEquals(CREATE_DISPOSITION, COPY_JOB_INFO_MULTIPLE_TABLES.createDisposition()); + assertEquals(WRITE_DISPOSITION, COPY_JOB_INFO_MULTIPLE_TABLES.writeDisposition()); + assertEquals(ETAG, COPY_JOB.etag()); + assertEquals(ID, COPY_JOB.id()); + assertEquals(SELF_LINK, COPY_JOB.selfLink()); + assertEquals(EMAIL, COPY_JOB.userEmail()); + assertEquals(JOB_ID, COPY_JOB.jobId()); + assertEquals(JOB_STATUS, COPY_JOB.status()); + assertEquals(DESTINATION_TABLE, COPY_JOB.destinationTable()); + assertEquals(ImmutableList.of(SOURCE_TABLE), COPY_JOB.sourceTables()); + assertEquals(CREATE_DISPOSITION, COPY_JOB.createDisposition()); + assertEquals(WRITE_DISPOSITION, COPY_JOB.writeDisposition()); + assertEquals(JOB_STATISTICS, COPY_JOB.statistics()); + } + + @Test + public void testToPbAndFromPb() { + assertNotNull(COPY_JOB.toPb().getConfiguration().getCopy()); + assertNull(COPY_JOB.toPb().getConfiguration().getExtract()); + assertNull(COPY_JOB.toPb().getConfiguration().getLoad()); + assertNull(COPY_JOB.toPb().getConfiguration().getQuery()); + assertNull(COPY_JOB.toPb().getConfiguration().getCopy().getSourceTables()); + assertEquals(JOB_STATISTICS, JobStatistics.fromPb(COPY_JOB.statistics().toPb())); + assertNull(COPY_JOB_INFO_MULTIPLE_TABLES.toPb().getConfiguration().getCopy().getSourceTable()); + compareCopyJobInfo(COPY_JOB, CopyJobInfo.fromPb(COPY_JOB.toPb())); + compareCopyJobInfo(COPY_JOB, (CopyJobInfo) JobInfo.fromPb(COPY_JOB.toPb())); + compareCopyJobInfo(COPY_JOB_INFO_MULTIPLE_TABLES, + CopyJobInfo.fromPb(COPY_JOB_INFO_MULTIPLE_TABLES.toPb())); + compareCopyJobInfo(COPY_JOB_INFO_MULTIPLE_TABLES, + (CopyJobInfo) JobInfo.fromPb(COPY_JOB_INFO_MULTIPLE_TABLES.toPb())); + CopyJobInfo job = CopyJobInfo.of(DESTINATION_TABLE, SOURCE_TABLES); + compareCopyJobInfo(job, 
CopyJobInfo.fromPb(job.toPb())); + compareCopyJobInfo(job, (CopyJobInfo) JobInfo.fromPb(job.toPb())); + } + + private void compareCopyJobInfo(CopyJobInfo expected, CopyJobInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.status(), value.status()); + assertEquals(expected.statistics(), value.statistics()); + assertEquals(expected.userEmail(), value.userEmail()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.sourceTables(), value.sourceTables()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java new file mode 100644 index 000000000000..371202174431 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/CsvOptionsTest.java @@ -0,0 +1,87 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;

import org.junit.Test;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

/**
 * Unit tests for {@link CsvOptions}: builder round-trips, getter values and
 * proto (de)serialization.
 */
public class CsvOptionsTest {

  private static final Boolean ALLOW_JAGGED_ROWS = true;
  private static final Boolean ALLOW_QUOTED_NEWLINE = true;
  private static final Charset ENCODING = StandardCharsets.UTF_8;
  private static final String FIELD_DELIMITER = ",";
  private static final String QUOTE = "\"";
  private static final Integer SKIP_LEADING_ROWS = 42;
  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder()
      .allowJaggedRows(ALLOW_JAGGED_ROWS)
      .allowQuotedNewLines(ALLOW_QUOTED_NEWLINE)
      .encoding(ENCODING)
      .fieldDelimiter(FIELD_DELIMITER)
      .quote(QUOTE)
      .skipLeadingRows(SKIP_LEADING_ROWS)
      .build();

  // toBuilder() must reproduce the source object, and a builder edit must be
  // reversible by restoring the original value.
  @Test
  public void testToBuilder() {
    compareCsvOptions(CSV_OPTIONS, CSV_OPTIONS.toBuilder().build());
    CsvOptions modified = CSV_OPTIONS.toBuilder().fieldDelimiter(";").build();
    assertEquals(";", modified.fieldDelimiter());
    CsvOptions restored = modified.toBuilder().fieldDelimiter(",").build();
    compareCsvOptions(CSV_OPTIONS, restored);
  }

  // A partially populated instance must also survive a toBuilder() round-trip.
  @Test
  public void testToBuilderIncomplete() {
    CsvOptions partial = CsvOptions.builder().fieldDelimiter("|").build();
    assertEquals(partial, partial.toBuilder().build());
  }

  // Every value handed to the builder must be readable back via the getters.
  @Test
  public void testBuilder() {
    assertEquals(FormatOptions.CSV, CSV_OPTIONS.type());
    assertEquals(ALLOW_JAGGED_ROWS, CSV_OPTIONS.allowJaggedRows());
    assertEquals(ALLOW_QUOTED_NEWLINE, CSV_OPTIONS.allowQuotedNewLines());
    assertEquals(ENCODING.name(), CSV_OPTIONS.encoding());
    assertEquals(FIELD_DELIMITER, CSV_OPTIONS.fieldDelimiter());
    assertEquals(QUOTE, CSV_OPTIONS.quote());
    assertEquals(SKIP_LEADING_ROWS, CSV_OPTIONS.skipLeadingRows());
  }

  // Serializing to the API proto and back must preserve every field, for both
  // fully and partially populated instances.
  @Test
  public void testToAndFromPb() {
    compareCsvOptions(CSV_OPTIONS, CsvOptions.fromPb(CSV_OPTIONS.toPb()));
    CsvOptions partial = CsvOptions.builder().allowJaggedRows(ALLOW_JAGGED_ROWS).build();
    compareCsvOptions(partial, CsvOptions.fromPb(partial.toPb()));
  }

  // Field-by-field comparison so a failure pinpoints the offending property.
  private void compareCsvOptions(CsvOptions expected, CsvOptions actual) {
    assertEquals(expected, actual);
    assertEquals(expected.allowJaggedRows(), actual.allowJaggedRows());
    assertEquals(expected.allowQuotedNewLines(), actual.allowQuotedNewLines());
    assertEquals(expected.encoding(), actual.encoding());
    assertEquals(expected.fieldDelimiter(), actual.fieldDelimiter());
    assertEquals(expected.quote(), actual.quote());
    assertEquals(expected.skipLeadingRows(), actual.skipLeadingRows());
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class DatasetIdTest { + + private static final DatasetId DATASET = DatasetId.of("dataset"); + private static final DatasetId DATASET_COMPLETE = DatasetId.of("project", "dataset"); + + @Test + public void testOf() { + assertEquals(null, DATASET.project()); + assertEquals("dataset", DATASET.dataset()); + assertEquals("project", DATASET_COMPLETE.project()); + assertEquals("dataset", DATASET_COMPLETE.dataset()); + } + + @Test + public void testEquals() { + compareDatasetIds(DATASET, DatasetId.of("dataset")); + compareDatasetIds(DATASET_COMPLETE, DatasetId.of("project", "dataset")); + } + + @Test + public void testToPbAndFromPb() { + compareDatasetIds(DATASET, DatasetId.fromPb(DATASET.toPb())); + compareDatasetIds(DATASET_COMPLETE, DatasetId.fromPb(DATASET_COMPLETE.toPb())); + } + + private void compareDatasetIds(DatasetId expected, DatasetId value) { + assertEquals(expected, value); + assertEquals(expected.project(), value.project()); + assertEquals(expected.dataset(), value.dataset()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java new file mode 100644 index 000000000000..43c80f6afe83 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/DatasetInfoTest.java @@ -0,0 +1,127 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link DatasetInfo}: builder round-trips, getter values and
 * proto (de)serialization.
 */
public class DatasetInfoTest {

  // Parameterized as List<Acl> (the original raw List defeated compile-time
  // type checking on the access rules).
  private static final List<Acl> ACCESS_RULES = ImmutableList.of(
      new Acl(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER),
      new Acl(new Acl.View(TableId.of("project", "dataset", "table")), Acl.Role.WRITER));
  private static final Long CREATION_TIME = System.currentTimeMillis();
  private static final Long DEFAULT_TABLE_EXPIRATION = CREATION_TIME + 100;
  private static final String DESCRIPTION = "description";
  private static final String ETAG = "0xFF00";
  private static final String FRIENDLY_NAME = "friendlyDataset";
  private static final String ID = "P/D:1";
  private static final Long LAST_MODIFIED = CREATION_TIME + 50;
  private static final String LOCATION = "";
  private static final String SELF_LINK = "http://bigquery/p/d";
  private static final DatasetId DATASET_ID = DatasetId.of("dataset");
  private static final DatasetId DATASET_ID_COMPLETE = DatasetId.of("project", "dataset");
  private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID)
      .acl(ACCESS_RULES)
      .creationTime(CREATION_TIME)
      .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION)
      .description(DESCRIPTION)
      .etag(ETAG)
      .friendlyName(FRIENDLY_NAME)
      .id(ID)
      .lastModified(LAST_MODIFIED)
      .location(LOCATION)
      .selfLink(SELF_LINK)
      .build();
  // Same dataset, but with the project explicitly set on the id.
  private static final DatasetInfo DATASET_INFO_COMPLETE = DATASET_INFO.toBuilder()
      .datasetId(DATASET_ID_COMPLETE)
      .build();

  // toBuilder() must reproduce the source object, and builder edits must be
  // reversible by restoring the original values.
  @Test
  public void testToBuilder() {
    compareDatasets(DATASET_INFO, DATASET_INFO.toBuilder().build());
    DatasetInfo datasetInfo = DATASET_INFO.toBuilder()
        .datasetId(DatasetId.of("dataset2"))
        .description("description2")
        .build();
    assertEquals(DatasetId.of("dataset2"), datasetInfo.datasetId());
    assertEquals("description2", datasetInfo.description());
    datasetInfo = datasetInfo.toBuilder().datasetId(DATASET_ID).description("description").build();
    compareDatasets(DATASET_INFO, datasetInfo);
  }

  // A minimally populated instance must also survive a toBuilder() round-trip.
  @Test
  public void testToBuilderIncomplete() {
    DatasetInfo datasetInfo = DatasetInfo.builder(DATASET_ID).build();
    assertEquals(datasetInfo, datasetInfo.toBuilder().build());
  }

  // Every builder value must be readable back, for ids with and without project.
  @Test
  public void testBuilder() {
    assertNull(DATASET_INFO.datasetId().project());
    assertEquals(DATASET_ID, DATASET_INFO.datasetId());
    assertEquals(ACCESS_RULES, DATASET_INFO.acl());
    assertEquals(CREATION_TIME, DATASET_INFO.creationTime());
    assertEquals(DEFAULT_TABLE_EXPIRATION, DATASET_INFO.defaultTableLifetime());
    assertEquals(DESCRIPTION, DATASET_INFO.description());
    assertEquals(ETAG, DATASET_INFO.etag());
    assertEquals(FRIENDLY_NAME, DATASET_INFO.friendlyName());
    assertEquals(ID, DATASET_INFO.id());
    assertEquals(LAST_MODIFIED, DATASET_INFO.lastModified());
    assertEquals(LOCATION, DATASET_INFO.location());
    assertEquals(SELF_LINK, DATASET_INFO.selfLink());
    assertEquals(DATASET_ID_COMPLETE, DATASET_INFO_COMPLETE.datasetId());
    assertEquals(ACCESS_RULES, DATASET_INFO_COMPLETE.acl());
    assertEquals(CREATION_TIME, DATASET_INFO_COMPLETE.creationTime());
    assertEquals(DEFAULT_TABLE_EXPIRATION, DATASET_INFO_COMPLETE.defaultTableLifetime());
    assertEquals(DESCRIPTION, DATASET_INFO_COMPLETE.description());
    assertEquals(ETAG, DATASET_INFO_COMPLETE.etag());
    assertEquals(FRIENDLY_NAME, DATASET_INFO_COMPLETE.friendlyName());
    assertEquals(ID, DATASET_INFO_COMPLETE.id());
    assertEquals(LAST_MODIFIED, DATASET_INFO_COMPLETE.lastModified());
    assertEquals(LOCATION, DATASET_INFO_COMPLETE.location());
    assertEquals(SELF_LINK, DATASET_INFO_COMPLETE.selfLink());
  }

  // Serializing to the API proto and back must preserve every field, for both
  // fully and minimally populated instances.
  @Test
  public void testToPbAndFromPb() {
    compareDatasets(DATASET_INFO_COMPLETE, DatasetInfo.fromPb(DATASET_INFO_COMPLETE.toPb()));
    DatasetInfo datasetInfo = DatasetInfo.builder("project", "dataset").build();
    compareDatasets(datasetInfo, DatasetInfo.fromPb(datasetInfo.toPb()));
  }

  // Field-by-field comparison so a failure pinpoints the offending property.
  private void compareDatasets(DatasetInfo expected, DatasetInfo value) {
    assertEquals(expected, value);
    assertEquals(expected.datasetId(), value.datasetId());
    assertEquals(expected.description(), value.description());
    assertEquals(expected.etag(), value.etag());
    assertEquals(expected.friendlyName(), value.friendlyName());
    assertEquals(expected.id(), value.id());
    assertEquals(expected.location(), value.location());
    assertEquals(expected.selfLink(), value.selfLink());
    assertEquals(expected.acl(), value.acl());
    assertEquals(expected.creationTime(), value.creationTime());
    assertEquals(expected.defaultTableLifetime(), value.defaultTableLifetime());
    assertEquals(expected.lastModified(), value.lastModified());
  }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;

import com.google.common.collect.ImmutableList;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link ExternalDataConfiguration}: builder round-trips,
 * getter values and proto (de)serialization.
 */
public class ExternalDataConfigurationTest {

  // Parameterized as List<String> (the original raw List defeated compile-time
  // type checking on the source URIs).
  private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2");
  private static final Field FIELD_SCHEMA1 =
      Field.builder("StringField", Field.Type.string())
          .mode(Field.Mode.NULLABLE)
          .description("FieldDescription1")
          .build();
  private static final Field FIELD_SCHEMA2 =
      Field.builder("IntegerField", Field.Type.integer())
          .mode(Field.Mode.REPEATED)
          .description("FieldDescription2")
          .build();
  // A record field nesting the two fields above, to exercise nested schemas.
  private static final Field FIELD_SCHEMA3 =
      Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2))
          .mode(Field.Mode.REQUIRED)
          .description("FieldDescription3")
          .build();
  private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3);
  private static final Integer MAX_BAD_RECORDS = 42;
  private static final Boolean IGNORE_UNKNOWN_VALUES = true;
  private static final String COMPRESSION = "GZIP";
  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build();
  private static final ExternalDataConfiguration CONFIGURATION = ExternalDataConfiguration
      .builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS)
      .compression(COMPRESSION)
      .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
      .maxBadRecords(MAX_BAD_RECORDS)
      .build();

  // toBuilder() must reproduce the source object, and a builder edit must be
  // reversible by restoring the original value.
  @Test
  public void testToBuilder() {
    compareConfiguration(CONFIGURATION, CONFIGURATION.toBuilder().build());
    ExternalDataConfiguration configuration = CONFIGURATION.toBuilder().compression("NONE").build();
    assertEquals("NONE", configuration.compression());
    configuration = configuration.toBuilder()
        .compression(COMPRESSION)
        .build();
    compareConfiguration(CONFIGURATION, configuration);
  }

  // A minimally populated instance must also survive a toBuilder() round-trip.
  @Test
  public void testToBuilderIncomplete() {
    ExternalDataConfiguration configuration =
        ExternalDataConfiguration.of(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.json());
    assertEquals(configuration, configuration.toBuilder().build());
  }

  // Every value handed to the builder must be readable back via the getters.
  @Test
  public void testBuilder() {
    assertEquals(COMPRESSION, CONFIGURATION.compression());
    assertEquals(CSV_OPTIONS, CONFIGURATION.formatOptions());
    assertEquals(IGNORE_UNKNOWN_VALUES, CONFIGURATION.ignoreUnknownValues());
    assertEquals(MAX_BAD_RECORDS, CONFIGURATION.maxBadRecords());
    assertEquals(TABLE_SCHEMA, CONFIGURATION.schema());
    assertEquals(SOURCE_URIS, CONFIGURATION.sourceUris());
  }

  // Serializing to the API proto and back must preserve every field, for both
  // fully and minimally populated instances.
  @Test
  public void testToAndFromPb() {
    compareConfiguration(CONFIGURATION, ExternalDataConfiguration.fromPb(CONFIGURATION.toPb()));
    ExternalDataConfiguration configuration =
        ExternalDataConfiguration.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS).build();
    compareConfiguration(configuration, ExternalDataConfiguration.fromPb(configuration.toPb()));
  }

  // Field-by-field comparison so a failure pinpoints the offending property.
  private void compareConfiguration(ExternalDataConfiguration expected,
      ExternalDataConfiguration value) {
    assertEquals(expected, value);
    assertEquals(expected.compression(), value.compression());
    assertEquals(expected.formatOptions(), value.formatOptions());
    assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues());
    assertEquals(expected.maxBadRecords(), value.maxBadRecords());
    assertEquals(expected.schema(), value.schema());
    assertEquals(expected.sourceUris(), value.sourceUris());
  }
}
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;
import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link ExtractJobInfo}: factory methods, builder
 * round-trips, getter values and proto (de)serialization, covering both the
 * multi-URI and the single-URI destination forms.
 */
public class ExtractJobInfoTest {

  private static final String ETAG = "etag";
  private static final String ID = "id";
  private static final String SELF_LINK = "selfLink";
  private static final String EMAIL = "email";
  // Parameterized as List<String> (the original raw List defeated compile-time
  // type checking on the destination URIs).
  private static final List<String> DESTINATION_URIS = ImmutableList.of("uri1", "uri2");
  private static final String DESTINATION_URI = "uri1";
  private static final TableId TABLE_ID = TableId.of("dataset", "table");
  private static final String FIELD_DELIMITER = ",";
  private static final String FORMAT = "CSV";
  private static final Boolean PRINT_HEADER = true;
  private static final String COMPRESSION = "GZIP";
  private static final JobId JOB_ID = JobId.of("job");
  private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE);
  private static final ExtractStatistics JOB_STATISTICS = ExtractStatistics.builder()
      .creationTime(1L)
      .endTime(3L)
      .startTime(2L)
      .destinationUriFileCounts(ImmutableList.of(42L))
      .build();
  private static final ExtractJobInfo EXTRACT_JOB =
      ExtractJobInfo.builder(TABLE_ID, DESTINATION_URIS)
          .etag(ETAG)
          .id(ID)
          .selfLink(SELF_LINK)
          .userEmail(EMAIL)
          .jobId(JOB_ID)
          .status(JOB_STATUS)
          .printHeader(PRINT_HEADER)
          .fieldDelimiter(FIELD_DELIMITER)
          .compression(COMPRESSION)
          .format(FORMAT)
          .statistics(JOB_STATISTICS)
          .build();
  // Same job built through the single-destination-URI convenience overload.
  private static final ExtractJobInfo EXTRACT_JOB_ONE_URI =
      ExtractJobInfo.builder(TABLE_ID, DESTINATION_URI)
          .etag(ETAG)
          .id(ID)
          .selfLink(SELF_LINK)
          .userEmail(EMAIL)
          .jobId(JOB_ID)
          .status(JOB_STATUS)
          .printHeader(PRINT_HEADER)
          .fieldDelimiter(FIELD_DELIMITER)
          .compression(COMPRESSION)
          .format(FORMAT)
          .build();

  // toBuilder() must reproduce the source object, and a builder edit must be
  // reversible by restoring the original value.
  @Test
  public void testToBuilder() {
    compareExtractJobInfo(EXTRACT_JOB, EXTRACT_JOB.toBuilder().build());
    ExtractJobInfo job = EXTRACT_JOB.toBuilder()
        .sourceTable(TableId.of("dataset", "newTable"))
        .build();
    assertEquals("newTable", job.sourceTable().table());
    job = job.toBuilder().sourceTable(TABLE_ID).build();
    compareExtractJobInfo(EXTRACT_JOB, job);
  }

  // All four of() overloads (with/without JobId, single/multiple URIs) must
  // populate the source table and destination URIs.
  @Test
  public void testOf() {
    ExtractJobInfo job = ExtractJobInfo.of(TABLE_ID, DESTINATION_URIS);
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(DESTINATION_URIS, job.destinationUris());
    job = ExtractJobInfo.of(TABLE_ID, DESTINATION_URI);
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(ImmutableList.of(DESTINATION_URI), job.destinationUris());
    job = ExtractJobInfo.of(JOB_ID, TABLE_ID, DESTINATION_URIS);
    assertEquals(JOB_ID, job.jobId());
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(DESTINATION_URIS, job.destinationUris());
    job = ExtractJobInfo.of(JOB_ID, TABLE_ID, DESTINATION_URI);
    assertEquals(JOB_ID, job.jobId());
    assertEquals(TABLE_ID, job.sourceTable());
    assertEquals(ImmutableList.of(DESTINATION_URI), job.destinationUris());
  }

  // A minimally populated instance must also survive a toBuilder() round-trip.
  @Test
  public void testToBuilderIncomplete() {
    ExtractJobInfo job = ExtractJobInfo.of(TABLE_ID, DESTINATION_URIS);
    compareExtractJobInfo(job, job.toBuilder().build());
  }

  // Every builder value must be readable back, for both destination forms.
  @Test
  public void testBuilder() {
    assertEquals(ETAG, EXTRACT_JOB.etag());
    assertEquals(ID, EXTRACT_JOB.id());
    assertEquals(SELF_LINK, EXTRACT_JOB.selfLink());
    assertEquals(EMAIL, EXTRACT_JOB.userEmail());
    assertEquals(JOB_ID, EXTRACT_JOB.jobId());
    assertEquals(JOB_STATUS, EXTRACT_JOB.status());
    assertEquals(TABLE_ID, EXTRACT_JOB.sourceTable());
    assertEquals(DESTINATION_URIS, EXTRACT_JOB.destinationUris());
    assertEquals(FIELD_DELIMITER, EXTRACT_JOB.fieldDelimiter());
    assertEquals(COMPRESSION, EXTRACT_JOB.compression());
    assertEquals(PRINT_HEADER, EXTRACT_JOB.printHeader());
    assertEquals(FORMAT, EXTRACT_JOB.format());
    assertEquals(JOB_STATISTICS, EXTRACT_JOB.statistics());
    assertEquals(ETAG, EXTRACT_JOB_ONE_URI.etag());
    assertEquals(ID, EXTRACT_JOB_ONE_URI.id());
    assertEquals(SELF_LINK, EXTRACT_JOB_ONE_URI.selfLink());
    assertEquals(EMAIL, EXTRACT_JOB_ONE_URI.userEmail());
    assertEquals(JOB_ID, EXTRACT_JOB_ONE_URI.jobId());
    assertEquals(JOB_STATUS, EXTRACT_JOB_ONE_URI.status());
    assertEquals(TABLE_ID, EXTRACT_JOB_ONE_URI.sourceTable());
    assertEquals(ImmutableList.of(DESTINATION_URI),
        EXTRACT_JOB_ONE_URI.destinationUris());
    assertEquals(FIELD_DELIMITER, EXTRACT_JOB_ONE_URI.fieldDelimiter());
    assertEquals(COMPRESSION, EXTRACT_JOB_ONE_URI.compression());
    assertEquals(PRINT_HEADER, EXTRACT_JOB_ONE_URI.printHeader());
    assertEquals(FORMAT, EXTRACT_JOB_ONE_URI.format());
  }

  // The proto must carry only the extract configuration (not copy/load/query),
  // and the round-trip must preserve every field.
  @Test
  public void testToPbAndFromPb() {
    assertNotNull(EXTRACT_JOB.toPb().getConfiguration().getExtract());
    assertNull(EXTRACT_JOB.toPb().getConfiguration().getCopy());
    assertNull(EXTRACT_JOB.toPb().getConfiguration().getLoad());
    assertNull(EXTRACT_JOB.toPb().getConfiguration().getQuery());
    assertEquals(JOB_STATISTICS, JobStatistics.fromPb(EXTRACT_JOB.toPb().getStatistics()));
    compareExtractJobInfo(EXTRACT_JOB,
        ExtractJobInfo.fromPb(EXTRACT_JOB.toPb()));
    compareExtractJobInfo(EXTRACT_JOB,
        (ExtractJobInfo) JobInfo.fromPb(EXTRACT_JOB.toPb()));
    compareExtractJobInfo(EXTRACT_JOB_ONE_URI,
        ExtractJobInfo.fromPb(EXTRACT_JOB_ONE_URI.toPb()));
    compareExtractJobInfo(EXTRACT_JOB_ONE_URI,
        (ExtractJobInfo) JobInfo.fromPb(EXTRACT_JOB_ONE_URI.toPb()));
    ExtractJobInfo job = ExtractJobInfo.of(TABLE_ID, DESTINATION_URIS);
    compareExtractJobInfo(job, ExtractJobInfo.fromPb(job.toPb()));
    compareExtractJobInfo(job, (ExtractJobInfo) JobInfo.fromPb(job.toPb()));
  }

  // Field-by-field comparison plus equals/hashCode/toString consistency.
  private void compareExtractJobInfo(ExtractJobInfo expected, ExtractJobInfo value) {
    assertEquals(expected, value);
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.etag(), value.etag());
    assertEquals(expected.id(), value.id());
    assertEquals(expected.jobId(), value.jobId());
    assertEquals(expected.selfLink(), value.selfLink());
    assertEquals(expected.status(), value.status());
    assertEquals(expected.statistics(), value.statistics());
    assertEquals(expected.userEmail(), value.userEmail());
    assertEquals(expected.sourceTable(), value.sourceTable());
    assertEquals(expected.destinationUris(), value.destinationUris());
    assertEquals(expected.compression(), value.compression());
    assertEquals(expected.printHeader(), value.printHeader());
    assertEquals(expected.fieldDelimiter(), value.fieldDelimiter());
    assertEquals(expected.format(), value.format());
  }
}
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldTest.java @@ -0,0 +1,106 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +public class FieldTest { + + private static final String FIELD_NAME1 = "StringField"; + private static final String FIELD_NAME2 = "IntegerField"; + private static final String FIELD_NAME3 = "RecordField"; + private static final Field.Type FIELD_TYPE1 = Field.Type.string(); + private static final Field.Type FIELD_TYPE2 = Field.Type.integer(); + private static final Field.Mode FIELD_MODE1 = Field.Mode.NULLABLE; + private static final Field.Mode FIELD_MODE2 = Field.Mode.REPEATED; + private static final Field.Mode FIELD_MODE3 = Field.Mode.REQUIRED; + private static final String FIELD_DESCRIPTION1 = "FieldDescription1"; + private static final String FIELD_DESCRIPTION2 = "FieldDescription2"; + private static final String FIELD_DESCRIPTION3 = "FieldDescription3"; + private static final Field FIELD_SCHEMA1 = Field.builder(FIELD_NAME1, FIELD_TYPE1) + .mode(FIELD_MODE1) + .description(FIELD_DESCRIPTION1) + .build(); + private static final Field FIELD_SCHEMA2 = Field.builder(FIELD_NAME2, FIELD_TYPE2) + .mode(FIELD_MODE2) + .description(FIELD_DESCRIPTION2) + .build(); + private static final Field.Type 
FIELD_TYPE3 = + Field.Type.record(ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2)); + private static final Field FIELD_SCHEMA3 = Field + .builder(FIELD_NAME3, FIELD_TYPE3) + .mode(FIELD_MODE3) + .description(FIELD_DESCRIPTION3) + .build(); + + @Test + public void testToBuilder() { + compareFieldSchemas(FIELD_SCHEMA1, FIELD_SCHEMA1.toBuilder().build()); + compareFieldSchemas(FIELD_SCHEMA2, FIELD_SCHEMA2.toBuilder().build()); + compareFieldSchemas(FIELD_SCHEMA3, FIELD_SCHEMA3.toBuilder().build()); + Field field = FIELD_SCHEMA1.toBuilder() + .description("New Description") + .build(); + assertEquals("New Description", field.description()); + field = field.toBuilder().description(FIELD_DESCRIPTION1).build(); + compareFieldSchemas(FIELD_SCHEMA1, field); + } + + @Test + public void testToBuilderIncomplete() { + Field field = Field.of(FIELD_NAME1, FIELD_TYPE1); + compareFieldSchemas(field, field.toBuilder().build()); + field = Field.of(FIELD_NAME2, FIELD_TYPE3); + compareFieldSchemas(field, field.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FIELD_NAME1, FIELD_SCHEMA1.name()); + assertEquals(FIELD_TYPE1, FIELD_SCHEMA1.type()); + assertEquals(FIELD_MODE1, FIELD_SCHEMA1.mode()); + assertEquals(FIELD_DESCRIPTION1, FIELD_SCHEMA1.description()); + assertEquals(null, FIELD_SCHEMA1.fields()); + assertEquals(FIELD_NAME3, FIELD_SCHEMA3.name()); + assertEquals(FIELD_TYPE3, FIELD_SCHEMA3.type()); + assertEquals(FIELD_MODE3, FIELD_SCHEMA3.mode()); + assertEquals(FIELD_DESCRIPTION3, FIELD_SCHEMA3.description()); + assertEquals(ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2), FIELD_SCHEMA3.fields()); + } + + @Test + public void testToAndFromPb() { + compareFieldSchemas(FIELD_SCHEMA1, Field.fromPb(FIELD_SCHEMA1.toPb())); + compareFieldSchemas(FIELD_SCHEMA2, Field.fromPb(FIELD_SCHEMA2.toPb())); + compareFieldSchemas(FIELD_SCHEMA3, Field.fromPb(FIELD_SCHEMA3.toPb())); + Field field = Field.builder(FIELD_NAME1, FIELD_TYPE1).build(); + 
compareFieldSchemas(field, Field.fromPb(field.toPb())); + } + + private void compareFieldSchemas(Field expected, Field value) { + assertEquals(expected, value); + assertEquals(expected.name(), value.name()); + assertEquals(expected.type(), value.type()); + assertEquals(expected.mode(), value.mode()); + assertEquals(expected.description(), value.description()); + assertEquals(expected.fields(), value.fields()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java new file mode 100644 index 000000000000..d6d879dbd58f --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/FieldValueTest.java @@ -0,0 +1,111 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;

import com.google.api.client.util.Data;
import com.google.api.services.bigquery.model.TableCell;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import org.junit.Test;

import java.util.Map;

/**
 * Unit tests for {@link FieldValue}: deserialization of primitive, null,
 * repeated and record cells from their API wire representations, plus the
 * equals/hashCode contract.
 */
public class FieldValueTest {

  private static final TableCell BOOLEAN_FIELD = new TableCell().setV("false");
  // Parameterized as Map<String, Object> (the original raw Map defeated
  // compile-time type checking); values are String for primitives and List for
  // repeated/record cells, so Object is the common value type.
  private static final Map<String, Object> INTEGER_FIELD =
      ImmutableMap.<String, Object>of("v", "1");
  private static final Map<String, Object> FLOAT_FIELD =
      ImmutableMap.<String, Object>of("v", "1.5");
  private static final Map<String, Object> STRING_FIELD =
      ImmutableMap.<String, Object>of("v", "string");
  private static final Map<String, Object> TIMESTAMP_FIELD =
      ImmutableMap.<String, Object>of("v", "42");
  // Data.nullOf produces the JSON-client marker for an explicit null value.
  private static final Map<String, Object> NULL_FIELD =
      ImmutableMap.<String, Object>of("v", Data.nullOf(String.class));
  private static final Map<String, Object> REPEATED_FIELD =
      ImmutableMap.<String, Object>of("v", ImmutableList.of(INTEGER_FIELD, INTEGER_FIELD));
  private static final Map<String, Object> RECORD_FIELD =
      ImmutableMap.<String, Object>of("f", ImmutableList.of(FLOAT_FIELD, TIMESTAMP_FIELD));

  // Each wire form must deserialize to the right attribute and typed value.
  @Test
  public void testFromPb() {
    FieldValue value = FieldValue.fromPb(BOOLEAN_FIELD);
    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
    assertFalse(value.booleanValue());
    value = FieldValue.fromPb(INTEGER_FIELD);
    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
    assertEquals(1, value.longValue());
    value = FieldValue.fromPb(FLOAT_FIELD);
    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
    assertEquals(1.5, value.doubleValue(), 0);
    value = FieldValue.fromPb(STRING_FIELD);
    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
    assertEquals("string", value.stringValue());
    value = FieldValue.fromPb(TIMESTAMP_FIELD);
    assertEquals(FieldValue.Attribute.PRIMITIVE, value.attribute());
    // Wire timestamps are seconds; timestampValue() is expected in microseconds.
    assertEquals(42000000, value.timestampValue());
    value = FieldValue.fromPb(NULL_FIELD);
    assertNull(value.value());
    value = FieldValue.fromPb(REPEATED_FIELD);
    assertEquals(FieldValue.Attribute.REPEATED, value.attribute());
    assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.repeatedValue().get(0));
    assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.repeatedValue().get(1));
    value = FieldValue.fromPb(RECORD_FIELD);
    assertEquals(FieldValue.Attribute.RECORD, value.attribute());
    assertEquals(FieldValue.fromPb(FLOAT_FIELD), value.repeatedValue().get(0));
    assertEquals(FieldValue.fromPb(TIMESTAMP_FIELD), value.repeatedValue().get(1));
  }

  // Hand-constructed values must equal (and hash like) their deserialized
  // counterparts, for every attribute kind.
  @Test
  public void testEquals() {
    FieldValue booleanValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "false");
    assertEquals(booleanValue, FieldValue.fromPb(BOOLEAN_FIELD));
    assertEquals(booleanValue.hashCode(), FieldValue.fromPb(BOOLEAN_FIELD).hashCode());

    FieldValue integerValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "1");
    assertEquals(integerValue, FieldValue.fromPb(INTEGER_FIELD));
    assertEquals(integerValue.hashCode(), FieldValue.fromPb(INTEGER_FIELD).hashCode());

    FieldValue floatValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "1.5");
    assertEquals(floatValue, FieldValue.fromPb(FLOAT_FIELD));
    assertEquals(floatValue.hashCode(), FieldValue.fromPb(FLOAT_FIELD).hashCode());

    FieldValue stringValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "string");
    assertEquals(stringValue, FieldValue.fromPb(STRING_FIELD));
    assertEquals(stringValue.hashCode(), FieldValue.fromPb(STRING_FIELD).hashCode());

    FieldValue timestampValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, "42");
    assertEquals(timestampValue, FieldValue.fromPb(TIMESTAMP_FIELD));
    assertEquals(timestampValue.hashCode(), FieldValue.fromPb(TIMESTAMP_FIELD).hashCode());

    FieldValue nullValue = new FieldValue(FieldValue.Attribute.PRIMITIVE, null);
    assertEquals(nullValue, FieldValue.fromPb(NULL_FIELD));
    assertEquals(nullValue.hashCode(), FieldValue.fromPb(NULL_FIELD).hashCode());

    FieldValue repeatedValue = new FieldValue(FieldValue.Attribute.REPEATED,
        ImmutableList.of(integerValue, integerValue));
    assertEquals(repeatedValue, FieldValue.fromPb(REPEATED_FIELD));
    assertEquals(repeatedValue.hashCode(), FieldValue.fromPb(REPEATED_FIELD).hashCode());

    FieldValue recordValue = new FieldValue(FieldValue.Attribute.RECORD,
        ImmutableList.of(floatValue, timestampValue));
    assertEquals(recordValue, FieldValue.fromPb(RECORD_FIELD));
    assertEquals(recordValue.hashCode(), FieldValue.fromPb(RECORD_FIELD).hashCode());
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class FormatOptionsTest { + + @Test + public void testConstructor() { + FormatOptions options = new FormatOptions(FormatOptions.CSV); + assertEquals(FormatOptions.CSV, options.type()); + options = new FormatOptions(FormatOptions.JSON); + assertEquals(FormatOptions.JSON, options.type()); + options = new FormatOptions(FormatOptions.DATASTORE_BACKUP); + assertEquals(FormatOptions.DATASTORE_BACKUP, options.type()); + } + + @Test + public void testFactoryMethods() { + assertEquals(FormatOptions.CSV, FormatOptions.csv().type()); + assertEquals(FormatOptions.JSON, FormatOptions.json().type()); + assertEquals(FormatOptions.DATASTORE_BACKUP, FormatOptions.datastoreBackup().type()); + } + + @Test + public void testEquals() { + assertEquals(FormatOptions.csv(), FormatOptions.csv()); + assertEquals(FormatOptions.csv().hashCode(), FormatOptions.csv().hashCode()); + assertEquals(FormatOptions.json(), FormatOptions.json()); + assertEquals(FormatOptions.json().hashCode(), FormatOptions.json().hashCode()); + assertEquals(FormatOptions.datastoreBackup(), FormatOptions.datastoreBackup()); + assertEquals(FormatOptions.datastoreBackup().hashCode(), + FormatOptions.datastoreBackup().hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java new file mode 100644 index 000000000000..4a4f01de4124 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java @@ -0,0 +1,785 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.Page; +import com.google.gcloud.bigquery.BigQuery.DatasetOption; +import com.google.gcloud.bigquery.BigQuery.JobListOption; +import com.google.gcloud.bigquery.BigQuery.JobOption; +import com.google.gcloud.bigquery.BigQuery.TableOption; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.gcloud.storage.BlobInfo; +import com.google.gcloud.storage.BucketInfo; +import com.google.gcloud.storage.Storage; +import com.google.gcloud.storage.testing.RemoteGcsHelper; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.Iterator; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; + +public class ITBigQueryTest { + + private static final Logger log = Logger.getLogger(ITBigQueryTest.class.getName()); + private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final String DESCRIPTION = "Test 
dataset"; + private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final Field TIMESTAMP_FIELD_SCHEMA = + Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .description("TimestampDescription") + .build(); + private static final Field STRING_FIELD_SCHEMA = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("StringDescription") + .build(); + private static final Field INTEGER_FIELD_SCHEMA = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("IntegerDescription") + .build(); + private static final Field BOOLEAN_FIELD_SCHEMA = + Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .description("BooleanDescription") + .build(); + private static final Field RECORD_FIELD_SCHEMA = + Field.builder("RecordField", Field.Type.record(TIMESTAMP_FIELD_SCHEMA, + STRING_FIELD_SCHEMA, INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA)) + .mode(Field.Mode.REQUIRED) + .description("RecordDescription") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(TIMESTAMP_FIELD_SCHEMA, STRING_FIELD_SCHEMA, + INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA, RECORD_FIELD_SCHEMA); + private static final Schema SIMPLE_SCHEMA = Schema.of(STRING_FIELD_SCHEMA); + private static final Schema QUERY_RESULT_SCHEMA = Schema.builder() + .addField(Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField(Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField(Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .build()) + .build(); + private static final String LOAD_FILE = "load.csv"; + private static final String JSON_LOAD_FILE = "load.json"; + private static final String EXTRACT_FILE = "extract.csv"; + private static final String BUCKET = RemoteGcsHelper.generateBucketName(); + 
private static final TableId TABLE_ID = TableId.of(DATASET, "testing_table"); + private static final String CSV_CONTENT = "StringValue1\nStringValue2\n"; + private static final String JSON_CONTENT = "{" + + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," + + "\"StringField\": \"stringValue\"," + + "\"IntegerField\": [\"0\", \"1\"]," + + "\"BooleanField\": \"false\"," + + "\"RecordField\": {" + + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + "\"StringField\": null," + + "\"IntegerField\": [\"1\",\"0\"]," + + "\"BooleanField\": \"true\"" + + "}" + + "}\n" + + "{" + + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," + + "\"StringField\": \"stringValue\"," + + "\"IntegerField\": [\"0\", \"1\"]," + + "\"BooleanField\": \"false\"," + + "\"RecordField\": {" + + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + "\"StringField\": null," + + "\"IntegerField\": [\"1\",\"0\"]," + + "\"BooleanField\": \"true\"" + + "}" + + "}"; + + private static BigQuery bigquery; + private static Storage storage; + + @Rule + public Timeout globalTimeout = Timeout.seconds(300); + + @BeforeClass + public static void beforeClass() throws IOException, InterruptedException { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + RemoteGcsHelper gcsHelper = RemoteGcsHelper.create(); + bigquery = bigqueryHelper.options().service(); + storage = gcsHelper.options().service(); + storage.create(BucketInfo.of(BUCKET)); + storage.create(BlobInfo.builder(BUCKET, LOAD_FILE).contentType("text/plain").build(), + CSV_CONTENT.getBytes(StandardCharsets.UTF_8)); + storage.create(BlobInfo.builder(BUCKET, JSON_LOAD_FILE).contentType("application/json").build(), + JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); + DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build(); + bigquery.create(info); + LoadJobInfo job = LoadJobInfo.builder(TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE) + 
.createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .schema(TABLE_SCHEMA) + .formatOptions(FormatOptions.json()) + .build(); + job = bigquery.create(job); + while (job.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + job = bigquery.getJob(job.jobId()); + } + assertNull(job.status().error()); + } + + @AfterClass + public static void afterClass() throws ExecutionException, InterruptedException { + if (bigquery != null) { + RemoteBigQueryHelper.forceDelete(bigquery, DATASET); + } + if (storage != null && !RemoteGcsHelper.forceDelete(storage, BUCKET, 10, TimeUnit.SECONDS)) { + if (log.isLoggable(Level.WARNING)) { + log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); + } + } + } + + @Test + public void testGetDataset() { + DatasetInfo dataset = bigquery.getDataset(DATASET); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(DATASET, dataset.datasetId().dataset()); + assertEquals(DESCRIPTION, dataset.description()); + assertNotNull(dataset.acl()); + assertNotNull(dataset.etag()); + assertNotNull(dataset.id()); + assertNotNull(dataset.lastModified()); + assertNotNull(dataset.selfLink()); + } + + @Test + public void testGetDatasetWithSelectedFields() { + DatasetInfo dataset = bigquery.getDataset(DATASET, + DatasetOption.fields(BigQuery.DatasetField.CREATION_TIME)); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(DATASET, dataset.datasetId().dataset()); + assertNotNull(dataset.creationTime()); + assertNull(dataset.description()); + assertNull(dataset.defaultTableLifetime()); + assertNull(dataset.acl()); + assertNull(dataset.etag()); + assertNull(dataset.friendlyName()); + assertNull(dataset.id()); + assertNull(dataset.lastModified()); + assertNull(dataset.location()); + assertNull(dataset.selfLink()); + } + + @Test + public void testUpdateDataset() { + DatasetInfo dataset = 
bigquery.create(DatasetInfo.builder(OTHER_DATASET) + .description("Some Description") + .build()); + assertNotNull(dataset); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); + assertEquals("Some Description", dataset.description()); + DatasetInfo updatedDataset = + bigquery.update(dataset.toBuilder().description("Updated Description").build()); + assertEquals("Updated Description", updatedDataset.description()); + assertTrue(bigquery.delete(OTHER_DATASET)); + } + + @Test + public void testUpdateDatasetWithSelectedFields() { + DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET) + .description("Some Description") + .build()); + assertNotNull(dataset); + assertEquals(bigquery.options().projectId(), dataset.datasetId().project()); + assertEquals(OTHER_DATASET, dataset.datasetId().dataset()); + assertEquals("Some Description", dataset.description()); + DatasetInfo updatedDataset = + bigquery.update(dataset.toBuilder().description("Updated Description").build(), + DatasetOption.fields(BigQuery.DatasetField.DESCRIPTION)); + assertEquals("Updated Description", updatedDataset.description()); + assertNull(updatedDataset.creationTime()); + assertNull(updatedDataset.defaultTableLifetime()); + assertNull(updatedDataset.acl()); + assertNull(updatedDataset.etag()); + assertNull(updatedDataset.friendlyName()); + assertNull(updatedDataset.id()); + assertNull(updatedDataset.lastModified()); + assertNull(updatedDataset.location()); + assertNull(updatedDataset.selfLink()); + assertTrue(bigquery.delete(OTHER_DATASET)); + } + + @Test + public void testCreateAndGetTable() { + String tableName = "test_create_and_get_table"; + TableId tableId = TableId.of(DATASET, tableName); + BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, TABLE_SCHEMA)); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + 
assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTableInfo); + assertTrue(remoteTableInfo instanceof TableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertEquals(BaseTableInfo.Type.TABLE, remoteTableInfo.type()); + assertEquals(TABLE_SCHEMA, remoteTableInfo.schema()); + assertNotNull(remoteTableInfo.creationTime()); + assertNotNull(remoteTableInfo.lastModifiedTime()); + assertNotNull(remoteTableInfo.numBytes()); + assertNotNull(remoteTableInfo.numRows()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCreateAndGetTableWithSelectedField() { + String tableName = "test_create_and_get_selected_fields_table"; + TableId tableId = TableId.of(DATASET, tableName); + BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, TABLE_SCHEMA)); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName, + TableOption.fields(BigQuery.TableField.CREATION_TIME)); + assertNotNull(remoteTableInfo); + assertTrue(remoteTableInfo instanceof TableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertEquals(BaseTableInfo.Type.TABLE, remoteTableInfo.type()); + assertNotNull(remoteTableInfo.creationTime()); + assertNull(remoteTableInfo.schema()); + assertNull(remoteTableInfo.lastModifiedTime()); + assertNull(remoteTableInfo.numBytes()); + assertNull(remoteTableInfo.numRows()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCreateExternalTable() throws InterruptedException { + String tableName = "test_create_external_table"; + TableId tableId = TableId.of(DATASET, tableName); + ExternalDataConfiguration externalDataConfiguration = ExternalDataConfiguration.of( + 
"gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); + BaseTableInfo tableInfo = ExternalTableInfo.of(tableId, externalDataConfiguration); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTableInfo); + assertTrue(remoteTableInfo instanceof ExternalTableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertEquals(TABLE_SCHEMA, remoteTableInfo.schema()); + QueryRequest request = QueryRequest.builder( + "SELECT TimestampField, StringField, IntegerField, BooleanField FROM " + DATASET + "." + + tableName) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobComplete()) { + response = bigquery.getQueryResults(response.jobId()); + Thread.sleep(1000); + } + long integerValue = 0; + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(integerValue, integerCell.longValue()); + assertEquals(false, booleanCell.booleanValue()); + integerValue = ~integerValue & 0x1; + rowCount++; + } + assertEquals(4, rowCount); + assertTrue(bigquery.delete(DATASET, 
tableName)); + } + + @Test + public void testCreateViewTable() throws InterruptedException { + String tableName = "test_create_view_table"; + TableId tableId = TableId.of(DATASET, tableName); + BaseTableInfo tableInfo = ViewInfo.of(tableId, + "SELECT TimestampField, StringField, BooleanField FROM " + DATASET + "." + + TABLE_ID.table()); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(tableName, createdTableInfo.tableId().table()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTableInfo); + assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId()); + assertTrue(remoteTableInfo instanceof ViewInfo); + Schema expectedSchema = Schema.builder() + .addField( + Field.builder("TimestampField", Field.Type.timestamp()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField( + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .build()) + .addField( + Field.builder("BooleanField", Field.Type.bool()) + .mode(Field.Mode.NULLABLE) + .build()) + .build(); + assertEquals(expectedSchema, remoteTableInfo.schema()); + QueryRequest request = QueryRequest.builder("SELECT * FROM " + tableName) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobComplete()) { + response = bigquery.getQueryResults(response.jobId()); + Thread.sleep(1000); + } + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + 
assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testListTables() { + String tableName = "test_list_tables"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + Page tables = bigquery.listTables(DATASET); + boolean found = false; + Iterator tableIterator = tables.values().iterator(); + while (tableIterator.hasNext() && !found) { + if (tableIterator.next().tableId().equals(createdTableInfo.tableId())) { + found = true; + } + } + assertTrue(found); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testUpdateTable() { + String tableName = "test_update_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder() + .description("newDescription").build()); + assertEquals(DATASET, updatedTableInfo.tableId().dataset()); + assertEquals(tableName, updatedTableInfo.tableId().table()); + assertEquals(TABLE_SCHEMA, updatedTableInfo.schema()); + assertEquals("newDescription", updatedTableInfo.description()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testUpdateTableWithSelectedFields() { + String tableName = "test_update_with_selected_fields_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder().description("newDescr") + 
.build(), TableOption.fields(BigQuery.TableField.DESCRIPTION)); + assertTrue(updatedTableInfo instanceof TableInfo); + assertEquals(DATASET, updatedTableInfo.tableId().dataset()); + assertEquals(tableName, updatedTableInfo.tableId().table()); + assertEquals("newDescr", updatedTableInfo.description()); + assertNull(updatedTableInfo.schema()); + assertNull(updatedTableInfo.lastModifiedTime()); + assertNull(updatedTableInfo.numBytes()); + assertNull(updatedTableInfo.numRows()); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testInsertAll() { + String tableName = "test_insert_all_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.insertErrors().size()); + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + } + + @Test + public void testInsertAllWithErrors() { + String tableName = "test_insert_all_with_errors_table"; + BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA); + assertNotNull(bigquery.create(tableInfo)); + InsertAllRequest request = 
InsertAllRequest.builder(tableInfo.tableId()) + .addRow(ImmutableMap.of( + "TimestampField", "2014-08-19 07:41:35.220 -05:00", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "invalidDate", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false, + "RecordField", ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "IntegerField", ImmutableList.of(1, 0), + "BooleanField", true))) + .addRow(ImmutableMap.of( + "TimestampField", "1969-07-20 20:18:04 UTC", + "StringField", "stringValue", + "IntegerField", ImmutableList.of(0, 1), + "BooleanField", false)) + .skipInvalidRows(true) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertTrue(response.hasErrors()); + assertEquals(2, response.insertErrors().size()); + assertNotNull(response.errorsFor(1L)); + assertNotNull(response.errorsFor(2L)); + assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); + } + + @Test + public void testListAllTableData() { + Page> rows = bigquery.listTableData(TABLE_ID); + int rowCount = 0; + for (List row : rows.values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue integerCell = row.get(2); + FieldValue booleanCell = row.get(3); + FieldValue recordCell = row.get(4); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", 
stringCell.stringValue()); + assertEquals(0, integerCell.repeatedValue().get(0).longValue()); + assertEquals(1, integerCell.repeatedValue().get(1).longValue()); + assertEquals(false, booleanCell.booleanValue()); + assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue()); + assertTrue(recordCell.recordValue().get(1).isNull()); + assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue()); + assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue()); + assertEquals(true, recordCell.recordValue().get(3).booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + } + + @Test + public void testQuery() throws InterruptedException { + String query = new StringBuilder() + .append("SELECT TimestampField, StringField, BooleanField FROM ") + .append(TABLE_ID.table()) + .toString(); + QueryRequest request = QueryRequest.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .maxWaitTime(60000L) + .maxResults(1000L) + .build(); + QueryResponse response = bigquery.query(request); + while (!response.jobComplete()) { + Thread.sleep(1000); + response = bigquery.getQueryResults(response.jobId()); + } + assertEquals(QUERY_RESULT_SCHEMA, response.result().schema()); + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + } + + @Test + public void testListJobs() { + Page jobs = bigquery.listJobs(); + for (JobInfo job : jobs.values()) { + 
assertNotNull(job.jobId()); + assertNotNull(job.statistics()); + assertNotNull(job.status()); + assertNotNull(job.userEmail()); + assertNotNull(job.id()); + } + } + + @Test + public void testListJobsWithSelectedFields() { + Page jobs = bigquery.listJobs(JobListOption.fields(BigQuery.JobField.USER_EMAIL)); + for (JobInfo job : jobs.values()) { + assertNotNull(job.jobId()); + assertNotNull(job.status()); + assertNotNull(job.userEmail()); + assertNull(job.statistics()); + assertNull(job.id()); + } + } + + @Test + public void testCreateAndGetJob() throws InterruptedException { + String sourceTableName = "test_create_and_get_job_source_table"; + String destinationTableName = "test_create_and_get_job_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(sourceTableName, createdTableInfo.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable); + CopyJobInfo createdJob = bigquery.create(job); + CopyJobInfo remoteJob = bigquery.getJob(createdJob.jobId()); + assertEquals(createdJob.jobId(), remoteJob.jobId()); + assertEquals(createdJob.sourceTables(), remoteJob.sourceTables()); + assertEquals(createdJob.destinationTable(), remoteJob.destinationTable()); + assertEquals(createdJob.createDisposition(), remoteJob.createDisposition()); + assertEquals(createdJob.writeDisposition(), remoteJob.writeDisposition()); + assertNotNull(remoteJob.etag()); + assertNotNull(remoteJob.statistics()); + assertNotNull(remoteJob.status()); + assertEquals(createdJob.selfLink(), remoteJob.selfLink()); + assertEquals(createdJob.userEmail(), remoteJob.userEmail()); + assertTrue(bigquery.delete(DATASET, sourceTableName)); + 
assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testCreateAndGetJobWithSelectedFields() throws InterruptedException { + String sourceTableName = "test_create_and_get_job_with_selected_fields_source_table"; + String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(sourceTableName, createdTableInfo.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable); + CopyJobInfo createdJob = bigquery.create(job, JobOption.fields(BigQuery.JobField.ETAG)); + assertNotNull(createdJob.jobId()); + assertNotNull(createdJob.sourceTables()); + assertNotNull(createdJob.destinationTable()); + assertNotNull(createdJob.etag()); + assertNull(createdJob.statistics()); + assertNull(createdJob.status()); + assertNull(createdJob.selfLink()); + assertNull(createdJob.userEmail()); + CopyJobInfo remoteJob = bigquery.getJob(createdJob.jobId(), + JobOption.fields(BigQuery.JobField.ETAG)); + assertEquals(createdJob.jobId(), remoteJob.jobId()); + assertEquals(createdJob.sourceTables(), remoteJob.sourceTables()); + assertEquals(createdJob.destinationTable(), remoteJob.destinationTable()); + assertEquals(createdJob.createDisposition(), remoteJob.createDisposition()); + assertEquals(createdJob.writeDisposition(), remoteJob.writeDisposition()); + assertNotNull(remoteJob.etag()); + assertNull(remoteJob.statistics()); + assertNull(remoteJob.status()); + assertNull(remoteJob.selfLink()); + assertNull(remoteJob.userEmail()); + assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(bigquery.delete(DATASET, 
destinationTableName)); + } + + @Test + public void testCopyJob() throws InterruptedException { + String sourceTableName = "test_copy_job_source_table"; + String destinationTableName = "test_copy_job_destination_table"; + TableId sourceTable = TableId.of(DATASET, sourceTableName); + BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA); + BaseTableInfo createdTableInfo = bigquery.create(tableInfo); + assertNotNull(createdTableInfo); + assertEquals(DATASET, createdTableInfo.tableId().dataset()); + assertEquals(sourceTableName, createdTableInfo.tableId().table()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable); + CopyJobInfo remoteJob = bigquery.create(job); + while (remoteJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteJob = bigquery.getJob(remoteJob.jobId()); + } + assertNull(remoteJob.status().error()); + BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, destinationTableName); + assertNotNull(remoteTableInfo); + assertEquals(destinationTable.dataset(), remoteTableInfo.tableId().dataset()); + assertEquals(destinationTableName, remoteTableInfo.tableId().table()); + assertEquals(SIMPLE_SCHEMA, remoteTableInfo.schema()); + assertTrue(bigquery.delete(DATASET, sourceTableName)); + assertTrue(bigquery.delete(DATASET, destinationTableName)); + } + + @Test + public void testQueryJob() throws InterruptedException { + String tableName = "test_query_job_table"; + String query = new StringBuilder() + .append("SELECT TimestampField, StringField, BooleanField FROM ") + .append(TABLE_ID.table()) + .toString(); + TableId destinationTable = TableId.of(DATASET, tableName); + QueryJobInfo job = QueryJobInfo.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(destinationTable) + .build(); + QueryJobInfo remoteJob = bigquery.create(job); + while (remoteJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); 
+ remoteJob = bigquery.getJob(remoteJob.jobId()); + } + assertNull(remoteJob.status().error()); + + QueryResponse response = bigquery.getQueryResults(remoteJob.jobId()); + while (!response.jobComplete()) { + Thread.sleep(1000); + response = bigquery.getQueryResults(response.jobId()); + } + assertFalse(response.hasErrors()); + assertEquals(QUERY_RESULT_SCHEMA, response.result().schema()); + int rowCount = 0; + for (List row : response.result().values()) { + FieldValue timestampCell = row.get(0); + FieldValue stringCell = row.get(1); + FieldValue booleanCell = row.get(2); + assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute()); + assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute()); + assertEquals(1408452095220000L, timestampCell.timestampValue()); + assertEquals("stringValue", stringCell.stringValue()); + assertEquals(false, booleanCell.booleanValue()); + rowCount++; + } + assertEquals(2, rowCount); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testExtractJob() throws InterruptedException { + String tableName = "test_export_job_table"; + TableId destinationTable = TableId.of(DATASET, tableName); + LoadJobInfo remoteLoadJob = bigquery.create( + LoadJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE) + .schema(SIMPLE_SCHEMA) + .build()); + while (remoteLoadJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteLoadJob = bigquery.getJob(remoteLoadJob.jobId()); + } + assertNull(remoteLoadJob.status().error()); + + ExtractJobInfo extractJob = + ExtractJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE) + .printHeader(false) + .build(); + ExtractJobInfo remoteExtractJob = bigquery.create(extractJob); + while (remoteExtractJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteExtractJob = bigquery.getJob(remoteExtractJob.jobId()); + } + 
assertNull(remoteExtractJob.status().error()); + assertEquals(CSV_CONTENT, + new String(storage.readAllBytes(BUCKET, EXTRACT_FILE), StandardCharsets.UTF_8)); + assertTrue(bigquery.delete(DATASET, tableName)); + } + + @Test + public void testCancelJob() throws InterruptedException { + String destinationTableName = "test_cancel_query_job_table"; + String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.table(); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + QueryJobInfo job = QueryJobInfo.builder(query) + .defaultDataset(DatasetId.of(DATASET)) + .destinationTable(destinationTable) + .build(); + JobInfo remoteJob = bigquery.create(job); + assertTrue(bigquery.cancel(remoteJob.jobId())); + while (remoteJob.status().state() != JobStatus.State.DONE) { + Thread.sleep(1000); + remoteJob = bigquery.getJob(remoteJob.jobId()); + } + assertNull(remoteJob.status().error()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java new file mode 100644 index 000000000000..fb744bd78920 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/InsertAllRequestTest.java @@ -0,0 +1,197 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import org.junit.Test;

import java.util.List;
import java.util.Map;

/**
 * Unit tests for {@link InsertAllRequest}: covers every builder/factory
 * overload plus equals/hashCode/toString consistency.
 */
public class InsertAllRequestTest {

  private static final Map<String, Object> CONTENT1 =
      ImmutableMap.<String, Object>of("key", "val1");
  private static final Map<String, Object> CONTENT2 =
      ImmutableMap.<String, Object>of("key", "val2");
  private static final List<InsertAllRequest.RowToInsert> ROWS =
      ImmutableList.of(InsertAllRequest.RowToInsert.of(CONTENT1),
          InsertAllRequest.RowToInsert.of(CONTENT2));
  private static final List<InsertAllRequest.RowToInsert> ROWS_WITH_ID =
      ImmutableList.of(InsertAllRequest.RowToInsert.of("id1", CONTENT1),
          InsertAllRequest.RowToInsert.of("id2", CONTENT2));
  private static final TableId TABLE_ID = TableId.of("dataset", "table");
  private static final Schema TABLE_SCHEMA = Schema.of();
  private static final BaseTableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_SCHEMA);
  private static final boolean SKIP_INVALID_ROWS = true;
  private static final boolean IGNORE_UNKNOWN_VALUES = false;
  // REQUEST1..REQUEST9 are built through every distinct builder entry point and
  // must all be equivalent; REQUEST10 flips both boolean options.
  private static final InsertAllRequest INSERT_ALL_REQUEST1 = InsertAllRequest.builder(TABLE_ID)
      .addRow(CONTENT1)
      .addRow(CONTENT2)
      .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
      .skipInvalidRows(SKIP_INVALID_ROWS)
      .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST2 = InsertAllRequest.builder(TABLE_ID)
      .rows(ROWS)
      .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
      .skipInvalidRows(SKIP_INVALID_ROWS)
      .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST3 =
      InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table())
          .rows(ROWS_WITH_ID)
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST4 =
      InsertAllRequest.builder(TABLE_ID, ROWS)
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST5 =
      InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table(), ROWS_WITH_ID)
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST6 =
      InsertAllRequest.builder(TABLE_ID, ROWS.get(0), ROWS.get(1))
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST7 =
      InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table(), ROWS_WITH_ID.get(0),
          ROWS_WITH_ID.get(1))
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST8 =
      InsertAllRequest.builder(TABLE_ID.dataset(), TABLE_ID.table())
          .addRow("id1", CONTENT1)
          .addRow("id2", CONTENT2)
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST9 =
      InsertAllRequest.builder(TABLE_INFO)
          .addRow("id1", CONTENT1)
          .addRow("id2", CONTENT2)
          .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
          .skipInvalidRows(SKIP_INVALID_ROWS)
          .build();
  private static final InsertAllRequest INSERT_ALL_REQUEST10 =
      InsertAllRequest.builder(TABLE_INFO)
          .addRow("id1", CONTENT1)
          .addRow("id2", CONTENT2)
          .ignoreUnknownValues(true)
          .skipInvalidRows(false)
          .build();

  @Test
  public void testBuilder() {
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST1.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST2.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST3.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST4.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST5.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST6.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST7.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST8.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST9.table());
    assertEquals(TABLE_ID, INSERT_ALL_REQUEST10.table());
    assertEquals(ROWS, INSERT_ALL_REQUEST1.rows());
    assertEquals(ROWS, INSERT_ALL_REQUEST2.rows());
    assertEquals(ROWS, INSERT_ALL_REQUEST4.rows());
    assertEquals(ROWS, INSERT_ALL_REQUEST6.rows());
    assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST3.rows());
    assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST5.rows());
    assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST7.rows());
    assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST8.rows());
    assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST9.rows());
    assertEquals(ROWS_WITH_ID, INSERT_ALL_REQUEST10.rows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST1.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST2.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST3.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST4.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST5.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST6.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST7.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST8.skipInvalidRows());
    assertEquals(SKIP_INVALID_ROWS, INSERT_ALL_REQUEST9.skipInvalidRows());
    assertFalse(INSERT_ALL_REQUEST10.skipInvalidRows());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST1.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST2.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST3.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST4.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST5.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST6.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST7.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST8.ignoreUnknownValues());
    assertEquals(IGNORE_UNKNOWN_VALUES, INSERT_ALL_REQUEST9.ignoreUnknownValues());
    assertTrue(INSERT_ALL_REQUEST10.ignoreUnknownValues());
  }

  @Test
  public void testOf() {
    InsertAllRequest request = InsertAllRequest.of(TABLE_ID, ROWS);
    assertEquals(TABLE_ID, request.table());
    assertEquals(ROWS, request.rows());
    request = InsertAllRequest.of(TABLE_INFO, ROWS);
    assertEquals(TABLE_ID, request.table());
    assertEquals(ROWS, request.rows());
    request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS);
    assertEquals(TABLE_ID, request.table());
    assertEquals(ROWS, request.rows());
    request = InsertAllRequest.of(TABLE_ID, ROWS.get(0), ROWS.get(1));
    assertEquals(TABLE_ID, request.table());
    assertEquals(ROWS, request.rows());
    request = InsertAllRequest.of(TABLE_INFO, ROWS.get(0), ROWS.get(1));
    assertEquals(TABLE_ID, request.table());
    assertEquals(ROWS, request.rows());
    request = InsertAllRequest.of(TABLE_ID.dataset(), TABLE_ID.table(), ROWS.get(0), ROWS.get(1));
    assertEquals(TABLE_ID, request.table());
    assertEquals(ROWS, request.rows());
  }

  @Test
  public void testEquals() {
    compareInsertAllRequest(INSERT_ALL_REQUEST1, INSERT_ALL_REQUEST2);
    compareInsertAllRequest(INSERT_ALL_REQUEST2, INSERT_ALL_REQUEST4);
    compareInsertAllRequest(INSERT_ALL_REQUEST3, INSERT_ALL_REQUEST5);
    compareInsertAllRequest(INSERT_ALL_REQUEST4, INSERT_ALL_REQUEST6);
    compareInsertAllRequest(INSERT_ALL_REQUEST5, INSERT_ALL_REQUEST7);
    compareInsertAllRequest(INSERT_ALL_REQUEST7, INSERT_ALL_REQUEST8);
    compareInsertAllRequest(INSERT_ALL_REQUEST8, INSERT_ALL_REQUEST9);
  }

  // Checks full value-object consistency: equals, hashCode, toString, and
  // every accessor.
  private void compareInsertAllRequest(InsertAllRequest expected, InsertAllRequest value) {
    assertEquals(expected, value);
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.table(), value.table());
    assertEquals(expected.rows(), value.rows());
    assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues());
    assertEquals(expected.skipInvalidRows(), value.skipInvalidRows());
  }
}
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;

import org.junit.Test;

import java.util.List;
import java.util.Map;

/**
 * Unit tests for {@link InsertAllResponse}: per-row error lookup, error
 * detection, and protobuf round-tripping.
 */
public class InsertAllResponseTest {

  private static final List<BigQueryError> ERRORS1 = ImmutableList.of(
      new BigQueryError("reason1", "location1", "message1"),
      new BigQueryError("reason2", "location2", "message2"));
  private static final List<BigQueryError> ERRORS2 = ImmutableList.of(
      new BigQueryError("reason3", "location3", "message3"),
      new BigQueryError("reason4", "location4", "message4"));
  // Keyed by the index of the row that failed in the insert request.
  private static final Map<Long, List<BigQueryError>> ERRORS_MAP = ImmutableMap.of(
      0L, ERRORS1, 1L, ERRORS2);
  private static final InsertAllResponse INSERT_ALL_RESPONSE = new InsertAllResponse(ERRORS_MAP);
  private static final InsertAllResponse EMPTY_INSERT_ALL_RESPONSE = new InsertAllResponse(null);

  @Test
  public void testConstructor() {
    assertEquals(INSERT_ALL_RESPONSE, INSERT_ALL_RESPONSE);
    // A self-comparison is vacuous; also compare against an independently
    // constructed instance with the same error map.
    assertEquals(INSERT_ALL_RESPONSE, new InsertAllResponse(ERRORS_MAP));
  }

  @Test
  public void testErrorsFor() {
    assertEquals(ERRORS1, INSERT_ALL_RESPONSE.errorsFor(0L));
    assertEquals(ERRORS2, INSERT_ALL_RESPONSE.errorsFor(1L));
    // No entry for row 2 -> null, not an empty list.
    assertNull(INSERT_ALL_RESPONSE.errorsFor(2L));
  }

  @Test
  public void testHasErrors() {
    assertTrue(INSERT_ALL_RESPONSE.hasErrors());
    assertFalse(EMPTY_INSERT_ALL_RESPONSE.hasErrors());
  }

  @Test
  public void testToPbAndFromPb() {
    compareInsertAllResponse(INSERT_ALL_RESPONSE,
        InsertAllResponse.fromPb(INSERT_ALL_RESPONSE.toPb()));
    compareInsertAllResponse(EMPTY_INSERT_ALL_RESPONSE,
        InsertAllResponse.fromPb(EMPTY_INSERT_ALL_RESPONSE.toPb()));
  }

  private void compareInsertAllResponse(InsertAllResponse expected, InsertAllResponse value) {
    assertEquals(expected, value);
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.insertErrors(), value.insertErrors());
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class JobIdTest { + + private static final JobId JOB = JobId.of("job"); + private static final JobId JOB_COMPLETE = JobId.of("project", "job"); + + @Test + public void testOf() { + assertEquals(null, JOB.project()); + assertEquals("job", JOB.job()); + assertEquals("project", JOB_COMPLETE.project()); + assertEquals("job", JOB_COMPLETE.job()); + } + + @Test + public void testEquals() { + compareJobs(JOB, JobId.of("job")); + compareJobs(JOB_COMPLETE, JobId.of("project", "job")); + } + + @Test + public void testToPbAndFromPb() { + compareJobs(JOB, JobId.fromPb(JOB.toPb())); + compareJobs(JOB_COMPLETE, JobId.fromPb(JOB_COMPLETE.toPb())); + } + + private void compareJobs(JobId expected, JobId value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.project(), value.project()); + assertEquals(expected.job(), value.job()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java new file mode 100644 index 000000000000..5b2123faa67d --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/JobStatisticsTest.java @@ -0,0 +1,178 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;
import com.google.gcloud.bigquery.JobStatistics.ExtractStatistics;
import com.google.gcloud.bigquery.JobStatistics.LoadStatistics;
import com.google.gcloud.bigquery.JobStatistics.QueryStatistics;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link JobStatistics} and its extract/load/query subclasses,
 * including builders with unset optional fields and protobuf round-tripping.
 */
public class JobStatisticsTest {

  private static final Integer BILLING_TIER = 42;
  private static final Boolean CACHE_HIT = true;
  private static final Long TOTAL_BYTES_BILLED = 24L;
  private static final Long TOTAL_BYTES_PROCESSED = 42L;
  private static final Long INPUT_BYTES = 1L;
  private static final Long INPUT_FILES = 2L;
  private static final Long OUTPUT_BYTES = 3L;
  private static final Long OUTPUT_ROWS = 4L;
  private static final List<Long> FILE_COUNT = ImmutableList.of(1L, 2L, 3L);
  private static final Long CREATION_TIME = 10L;
  private static final Long END_TIME = 20L;
  private static final Long START_TIME = 15L;
  private static final ExtractStatistics EXTRACT_STATISTICS = ExtractStatistics.builder()
      .creationTime(CREATION_TIME)
      .endTime(END_TIME)
      .startTime(START_TIME)
      .destinationUriFileCounts(FILE_COUNT)
      .build();
  private static final LoadStatistics LOAD_STATISTICS = LoadStatistics.builder()
      .creationTime(CREATION_TIME)
      .endTime(END_TIME)
      .startTime(START_TIME)
      .inputBytes(INPUT_BYTES)
      .inputFiles(INPUT_FILES)
      .outputBytes(OUTPUT_BYTES)
      .outputRows(OUTPUT_ROWS)
      .build();
  // Leaves output fields unset to exercise the partially-populated path.
  private static final LoadStatistics LOAD_STATISTICS_INCOMPLETE = LoadStatistics.builder()
      .creationTime(CREATION_TIME)
      .endTime(END_TIME)
      .startTime(START_TIME)
      .inputBytes(INPUT_BYTES)
      .inputFiles(INPUT_FILES)
      .build();
  private static final QueryStatistics QUERY_STATISTICS = QueryStatistics.builder()
      .creationTime(CREATION_TIME)
      .endTime(END_TIME)
      .startTime(START_TIME)
      .billingTier(BILLING_TIER)
      .cacheHit(CACHE_HIT)
      .totalBytesBilled(TOTAL_BYTES_BILLED)
      .totalBytesProcessed(TOTAL_BYTES_PROCESSED)
      .build();
  // Leaves byte counters unset to exercise the partially-populated path.
  private static final QueryStatistics QUERY_STATISTICS_INCOMPLETE = QueryStatistics.builder()
      .creationTime(CREATION_TIME)
      .endTime(END_TIME)
      .startTime(START_TIME)
      .billingTier(BILLING_TIER)
      .cacheHit(CACHE_HIT)
      .build();
  private static final JobStatistics STATISTICS = JobStatistics.builder()
      .creationTime(CREATION_TIME)
      .endTime(END_TIME)
      .startTime(START_TIME)
      .build();

  @Test
  public void testBuilder() {
    assertEquals(CREATION_TIME, STATISTICS.creationTime());
    assertEquals(START_TIME, STATISTICS.startTime());
    assertEquals(END_TIME, STATISTICS.endTime());

    assertEquals(CREATION_TIME, EXTRACT_STATISTICS.creationTime());
    assertEquals(START_TIME, EXTRACT_STATISTICS.startTime());
    assertEquals(END_TIME, EXTRACT_STATISTICS.endTime());
    assertEquals(FILE_COUNT, EXTRACT_STATISTICS.destinationUriFileCounts());

    assertEquals(CREATION_TIME, LOAD_STATISTICS.creationTime());
    assertEquals(START_TIME, LOAD_STATISTICS.startTime());
    assertEquals(END_TIME, LOAD_STATISTICS.endTime());
    assertEquals(INPUT_BYTES, LOAD_STATISTICS.inputBytes());
    assertEquals(INPUT_FILES, LOAD_STATISTICS.inputFiles());
    assertEquals(OUTPUT_BYTES, LOAD_STATISTICS.outputBytes());
    assertEquals(OUTPUT_ROWS, LOAD_STATISTICS.outputRows());

    assertEquals(CREATION_TIME, QUERY_STATISTICS.creationTime());
    assertEquals(START_TIME, QUERY_STATISTICS.startTime());
    assertEquals(END_TIME, QUERY_STATISTICS.endTime());
    assertEquals(BILLING_TIER, QUERY_STATISTICS.billingTier());
    assertEquals(CACHE_HIT, QUERY_STATISTICS.cacheHit());
    assertEquals(TOTAL_BYTES_BILLED, QUERY_STATISTICS.totalBytesBilled());
    assertEquals(TOTAL_BYTES_PROCESSED, QUERY_STATISTICS.totalBytesProcessed());

    assertEquals(CREATION_TIME, LOAD_STATISTICS_INCOMPLETE.creationTime());
    assertEquals(START_TIME, LOAD_STATISTICS_INCOMPLETE.startTime());
    assertEquals(END_TIME, LOAD_STATISTICS_INCOMPLETE.endTime());
    assertEquals(INPUT_BYTES, LOAD_STATISTICS_INCOMPLETE.inputBytes());
    assertEquals(INPUT_FILES, LOAD_STATISTICS_INCOMPLETE.inputFiles());
    assertNull(LOAD_STATISTICS_INCOMPLETE.outputBytes());
    assertNull(LOAD_STATISTICS_INCOMPLETE.outputRows());

    assertEquals(CREATION_TIME, QUERY_STATISTICS_INCOMPLETE.creationTime());
    assertEquals(START_TIME, QUERY_STATISTICS_INCOMPLETE.startTime());
    assertEquals(END_TIME, QUERY_STATISTICS_INCOMPLETE.endTime());
    assertEquals(BILLING_TIER, QUERY_STATISTICS_INCOMPLETE.billingTier());
    assertEquals(CACHE_HIT, QUERY_STATISTICS_INCOMPLETE.cacheHit());
    assertNull(QUERY_STATISTICS_INCOMPLETE.totalBytesBilled());
    assertNull(QUERY_STATISTICS_INCOMPLETE.totalBytesProcessed());
  }

  @Test
  public void testToPbAndFromPb() {
    compareExtractStatistics(EXTRACT_STATISTICS,
        ExtractStatistics.fromPb(EXTRACT_STATISTICS.toPb()));
    compareLoadStatistics(LOAD_STATISTICS, LoadStatistics.fromPb(LOAD_STATISTICS.toPb()));
    compareQueryStatistics(QUERY_STATISTICS, QueryStatistics.fromPb(QUERY_STATISTICS.toPb()));
    compareStatistics(STATISTICS, JobStatistics.fromPb(STATISTICS.toPb()));

    compareLoadStatistics(LOAD_STATISTICS_INCOMPLETE,
        LoadStatistics.fromPb(LOAD_STATISTICS_INCOMPLETE.toPb()));
    compareQueryStatistics(QUERY_STATISTICS_INCOMPLETE,
        QueryStatistics.fromPb(QUERY_STATISTICS_INCOMPLETE.toPb()));
  }

  private void compareExtractStatistics(ExtractStatistics expected, ExtractStatistics value) {
    assertEquals(expected, value);
    compareStatistics(expected, value);
    assertEquals(expected.destinationUriFileCounts(), value.destinationUriFileCounts());
  }

  private void compareLoadStatistics(LoadStatistics expected, LoadStatistics value) {
    assertEquals(expected, value);
    compareStatistics(expected, value);
    assertEquals(expected.inputBytes(), value.inputBytes());
    assertEquals(expected.inputFiles(), value.inputFiles());
    assertEquals(expected.outputBytes(), value.outputBytes());
    assertEquals(expected.outputRows(), value.outputRows());
  }

  private void compareQueryStatistics(QueryStatistics expected, QueryStatistics value) {
    assertEquals(expected, value);
    compareStatistics(expected, value);
    assertEquals(expected.billingTier(), value.billingTier());
    assertEquals(expected.cacheHit(), value.cacheHit());
    assertEquals(expected.totalBytesBilled(), value.totalBytesBilled());
    assertEquals(expected.totalBytesProcessed(), value.totalBytesProcessed());
  }

  private void compareStatistics(JobStatistics expected, JobStatistics value) {
    assertEquals(expected, value);
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.creationTime(), value.creationTime());
    assertEquals(expected.endTime(), value.endTime());
    assertEquals(expected.startTime(), value.startTime());
  }
}
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;

import org.junit.Test;

import java.util.List;

/**
 * Unit tests for {@link JobStatus}: constructor with full, partial, and
 * absent error information, plus protobuf round-tripping.
 */
public class JobStatusTest {

  private static final JobStatus.State STATE = JobStatus.State.DONE;
  private static final BigQueryError ERROR =
      new BigQueryError("reason", "location", "message", "debugInfo");
  private static final List<BigQueryError> ALL_ERRORS = ImmutableList.of(
      new BigQueryError("reason1", "location1", "message1", "debugInfo1"),
      new BigQueryError("reason2", "location2", "message2", "debugInfo2"));
  private static final JobStatus JOB_STATUS = new JobStatus(STATE, ERROR, ALL_ERRORS);
  private static final JobStatus JOB_STATUS_INCOMPLETE1 = new JobStatus(STATE, ERROR, null);
  private static final JobStatus JOB_STATUS_INCOMPLETE2 = new JobStatus(STATE, null, null);

  @Test
  public void testConstructor() {
    assertEquals(STATE, JOB_STATUS.state());
    assertEquals(ERROR, JOB_STATUS.error());
    assertEquals(ALL_ERRORS, JOB_STATUS.executionErrors());

    assertEquals(STATE, JOB_STATUS_INCOMPLETE1.state());
    assertEquals(ERROR, JOB_STATUS_INCOMPLETE1.error());
    assertNull(JOB_STATUS_INCOMPLETE1.executionErrors());

    assertEquals(STATE, JOB_STATUS_INCOMPLETE2.state());
    assertNull(JOB_STATUS_INCOMPLETE2.error());
    assertNull(JOB_STATUS_INCOMPLETE2.executionErrors());
  }

  @Test
  public void testToPbAndFromPb() {
    compareStatus(JOB_STATUS, JobStatus.fromPb(JOB_STATUS.toPb()));
    compareStatus(JOB_STATUS_INCOMPLETE1, JobStatus.fromPb(JOB_STATUS_INCOMPLETE1.toPb()));
    compareStatus(JOB_STATUS_INCOMPLETE2, JobStatus.fromPb(JOB_STATUS_INCOMPLETE2.toPb()));
  }

  private void compareStatus(JobStatus expected, JobStatus value) {
    assertEquals(expected, value);
    // Match the other value-object comparators in this package: equal objects
    // must agree on hashCode and toString as well.
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.state(), value.state());
    assertEquals(expected.error(), value.error());
    assertEquals(expected.executionErrors(), value.executionErrors());
  }
}
package com.google.gcloud.bigquery;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

import com.google.common.collect.ImmutableList;
import com.google.gcloud.bigquery.JobInfo.CreateDisposition;
import com.google.gcloud.bigquery.JobInfo.WriteDisposition;
import com.google.gcloud.bigquery.JobStatistics.LoadStatistics;

import org.junit.Test;

import java.nio.charset.StandardCharsets;
import java.util.List;

/**
 * Unit tests for {@link LoadJobInfo}: builder, factory methods, toBuilder
 * round-trips, and protobuf serialization.
 */
public class LoadJobInfoTest {

  private static final String ETAG = "etag";
  private static final String ID = "id";
  private static final String SELF_LINK = "selfLink";
  private static final String EMAIL = "email";
  private static final CsvOptions CSV_OPTIONS = CsvOptions.builder()
      .allowJaggedRows(true)
      .allowQuotedNewLines(false)
      .encoding(StandardCharsets.UTF_8)
      .build();
  private static final String SOURCE_URI = "uri";
  private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2");
  private static final TableId TABLE_ID = TableId.of("dataset", "table");
  private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED;
  private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND;
  private static final Integer MAX_BAD_RECORDS = 42;
  // Expected format string derived from CSV_OPTIONS.
  private static final String FORMAT = "CSV";
  private static final Boolean IGNORE_UNKNOWN_VALUES = true;
  private static final List<String> PROJECTION_FIELDS = ImmutableList.of("field1", "field2");
  private static final JobId JOB_ID = JobId.of("job");
  private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE);
  private static final Field FIELD_SCHEMA = Field.builder("IntegerField", Field.Type.integer())
      .mode(Field.Mode.REQUIRED)
      .description("FieldDescription")
      .build();
  private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA);
  private static final LoadStatistics JOB_STATISTICS = LoadStatistics.builder()
      .creationTime(1L)
      .endTime(3L)
      .startTime(2L)
      .inputFiles(42L)
      .outputBytes(1024L)
      .inputBytes(2048L)
      .outputRows(24L)
      .build();
  // Fully populated job, exercising every builder setter.
  private static final LoadJobInfo LOAD_JOB = LoadJobInfo.builder(TABLE_ID, SOURCE_URIS)
      .etag(ETAG)
      .id(ID)
      .selfLink(SELF_LINK)
      .userEmail(EMAIL)
      .jobId(JOB_ID)
      .status(JOB_STATUS)
      .createDisposition(CREATE_DISPOSITION)
      .writeDisposition(WRITE_DISPOSITION)
      .formatOptions(CSV_OPTIONS)
      .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES)
      .maxBadRecords(MAX_BAD_RECORDS)
      .projectionFields(PROJECTION_FIELDS)
      .schema(TABLE_SCHEMA)
      .statistics(JOB_STATISTICS)
      .build();

  @Test
  public void testToBuilder() {
    compareLoadJobInfo(LOAD_JOB, LOAD_JOB.toBuilder().build());
    LoadJobInfo job = LOAD_JOB.toBuilder()
        .destinationTable(TableId.of("dataset", "newTable"))
        .build();
    assertEquals("newTable", job.destinationTable().table());
    job = job.toBuilder().destinationTable(TABLE_ID).build();
    compareLoadJobInfo(LOAD_JOB, job);
  }

  @Test
  public void testOf() {
    LoadJobInfo job = LoadJobInfo.of(TABLE_ID, SOURCE_URIS);
    assertEquals(TABLE_ID, job.destinationTable());
    assertEquals(SOURCE_URIS, job.sourceUris());
    job = LoadJobInfo.of(TABLE_ID, SOURCE_URI);
    assertEquals(TABLE_ID, job.destinationTable());
    assertEquals(ImmutableList.of(SOURCE_URI), job.sourceUris());
    job = LoadJobInfo.of(JOB_ID, TABLE_ID, SOURCE_URIS);
    assertEquals(JOB_ID, job.jobId());
    assertEquals(TABLE_ID, job.destinationTable());
    assertEquals(SOURCE_URIS, job.sourceUris());
    job = LoadJobInfo.of(JOB_ID, TABLE_ID, SOURCE_URI);
    assertEquals(JOB_ID, job.jobId());
    assertEquals(TABLE_ID, job.destinationTable());
    assertEquals(ImmutableList.of(SOURCE_URI), job.sourceUris());
  }

  @Test
  public void testToBuilderIncomplete() {
    LoadJobInfo job = LoadJobInfo.of(TABLE_ID, SOURCE_URIS);
    compareLoadJobInfo(job, job.toBuilder().build());
  }

  @Test
  public void testBuilder() {
    assertEquals(ETAG, LOAD_JOB.etag());
    assertEquals(ID, LOAD_JOB.id());
    assertEquals(SELF_LINK, LOAD_JOB.selfLink());
    assertEquals(EMAIL, LOAD_JOB.userEmail());
    assertEquals(JOB_ID, LOAD_JOB.jobId());
    assertEquals(JOB_STATUS, LOAD_JOB.status());
    assertEquals(TABLE_ID, LOAD_JOB.destinationTable());
    assertEquals(SOURCE_URIS, LOAD_JOB.sourceUris());
    assertEquals(CREATE_DISPOSITION, LOAD_JOB.createDisposition());
    assertEquals(WRITE_DISPOSITION, LOAD_JOB.writeDisposition());
    assertEquals(CSV_OPTIONS, LOAD_JOB.csvOptions());
    assertEquals(FORMAT, LOAD_JOB.format());
    assertEquals(IGNORE_UNKNOWN_VALUES, LOAD_JOB.ignoreUnknownValues());
    assertEquals(MAX_BAD_RECORDS, LOAD_JOB.maxBadRecords());
    assertEquals(PROJECTION_FIELDS, LOAD_JOB.projectionFields());
    assertEquals(TABLE_SCHEMA, LOAD_JOB.schema());
    assertEquals(JOB_STATISTICS, LOAD_JOB.statistics());
  }

  @Test
  public void testToPbAndFromPb() {
    // Only the load configuration should be populated in the protobuf.
    assertNotNull(LOAD_JOB.toPb().getConfiguration().getLoad());
    assertNull(LOAD_JOB.toPb().getConfiguration().getExtract());
    assertNull(LOAD_JOB.toPb().getConfiguration().getCopy());
    assertNull(LOAD_JOB.toPb().getConfiguration().getQuery());
    assertEquals(JOB_STATISTICS, JobStatistics.fromPb(LOAD_JOB.toPb().getStatistics()));
    compareLoadJobInfo(LOAD_JOB, LoadJobInfo.fromPb(LOAD_JOB.toPb()));
    compareLoadJobInfo(LOAD_JOB, (LoadJobInfo) JobInfo.fromPb(LOAD_JOB.toPb()));
    LoadJobInfo job = LoadJobInfo.of(TABLE_ID, SOURCE_URIS);
    compareLoadJobInfo(job, LoadJobInfo.fromPb(job.toPb()));
    compareLoadJobInfo(job, (LoadJobInfo) JobInfo.fromPb(job.toPb()));
  }

  private void compareLoadJobInfo(LoadJobInfo expected, LoadJobInfo value) {
    assertEquals(expected, value);
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
    assertEquals(expected.etag(), value.etag());
    assertEquals(expected.id(), value.id());
    assertEquals(expected.jobId(), value.jobId());
    assertEquals(expected.selfLink(), value.selfLink());
    assertEquals(expected.status(), value.status());
    assertEquals(expected.statistics(), value.statistics());
    assertEquals(expected.userEmail(), value.userEmail());
    assertEquals(expected.destinationTable(), value.destinationTable());
    assertEquals(expected.sourceUris(), value.sourceUris());
    assertEquals(expected.createDisposition(), value.createDisposition());
    assertEquals(expected.writeDisposition(), value.writeDisposition());
    assertEquals(expected.csvOptions(), value.csvOptions());
    assertEquals(expected.format(), value.format());
    assertEquals(expected.ignoreUnknownValues(), value.ignoreUnknownValues());
    assertEquals(expected.maxBadRecords(), value.maxBadRecords());
    assertEquals(expected.projectionFields(), value.projectionFields());
    assertEquals(expected.schema(), value.schema());
  }
}
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.gcloud.spi.BigQueryRpc; + +import org.junit.Test; + +public class OptionTest { + + @Test + public void testOption() { + Option option = new Option(BigQueryRpc.Option.PAGE_TOKEN, "token"); + assertEquals(BigQueryRpc.Option.PAGE_TOKEN, option.rpcOption()); + assertEquals("token", option.value()); + } + + @Test(expected = NullPointerException.class) + public void testNullRpcOption() { + new Option(null, "token"); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobInfoTest.java new file mode 100644 index 000000000000..f99bec19efd9 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryJobInfoTest.java @@ -0,0 +1,202 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.bigquery.JobInfo.CreateDisposition; +import com.google.gcloud.bigquery.JobInfo.WriteDisposition; +import com.google.gcloud.bigquery.JobStatistics.QueryStatistics; +import com.google.gcloud.bigquery.QueryJobInfo.Priority; + +import org.junit.Test; + +import java.util.List; +import java.util.Map; + +public class QueryJobInfoTest { + + private static final String ETAG = "etag"; + private static final String ID = "id"; + private static final String SELF_LINK = "selfLink"; + private static final String EMAIL = "email"; + private static final String QUERY = "BigQuery SQL"; + private static final DatasetId DATASET_ID = DatasetId.of("project", "dataset"); + private static final TableId TABLE_ID = TableId.of("project", "dataset", "table"); + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final String COMPRESSION = "GZIP"; + private 
static final CsvOptions CSV_OPTIONS = CsvOptions.builder().build(); + private static final ExternalDataConfiguration TABLE_CONFIGURATION = ExternalDataConfiguration + .builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + private static final Map TABLE_DEFINITIONS = + ImmutableMap.of("tableName", TABLE_CONFIGURATION); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Priority PRIORITY = Priority.BATCH; + private static final boolean ALLOW_LARGE_RESULTS = true; + private static final boolean USE_QUERY_CACHE = false; + private static final boolean FLATTEN_RESULTS = true; + private static final List USER_DEFINED_FUNCTIONS = ImmutableList.of( + UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final JobId JOB_ID = JobId.of("job"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE); + private static final QueryStatistics JOB_STATISTICS = QueryStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .totalBytesProcessed(2048L) + .totalBytesBilled(1024L) + .cacheHit(false) + .billingTier(42) + .build(); + private static final QueryJobInfo QUERY_JOB = QueryJobInfo.builder(QUERY) + .etag(ETAG) + .id(ID) + .selfLink(SELF_LINK) + .userEmail(EMAIL) + .jobId(JOB_ID) + .status(JOB_STATUS) + .useQueryCache(USE_QUERY_CACHE) + .tableDefinitions(TABLE_DEFINITIONS) + .allowLargeResults(ALLOW_LARGE_RESULTS) + .createDisposition(CREATE_DISPOSITION) + .defaultDataset(DATASET_ID) + .destinationTable(TABLE_ID) + .writeDisposition(WRITE_DISPOSITION) + .priority(PRIORITY) + .flattenResults(FLATTEN_RESULTS) + .userDefinedFunctions(USER_DEFINED_FUNCTIONS) + .dryRun(true) + .statistics(JOB_STATISTICS) + .build(); + + @Test + 
public void testToBuilder() { + compareQueryJobInfo(QUERY_JOB, QUERY_JOB.toBuilder().build()); + QueryJobInfo job = QUERY_JOB.toBuilder() + .query("New BigQuery SQL") + .build(); + assertEquals("New BigQuery SQL", job.query()); + job = job.toBuilder().query(QUERY).build(); + compareQueryJobInfo(QUERY_JOB, job); + } + + @Test + public void testOf() { + QueryJobInfo job = QueryJobInfo.of(QUERY); + assertEquals(QUERY, job.query()); + job = QueryJobInfo.of(JOB_ID, QUERY); + assertEquals(JOB_ID, job.jobId()); + assertEquals(QUERY, job.query()); + } + + @Test + public void testToBuilderIncomplete() { + QueryJobInfo job = QueryJobInfo.of(QUERY); + compareQueryJobInfo(job, job.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(ETAG, QUERY_JOB.etag()); + assertEquals(ID, QUERY_JOB.id()); + assertEquals(SELF_LINK, QUERY_JOB.selfLink()); + assertEquals(EMAIL, QUERY_JOB.userEmail()); + assertEquals(JOB_ID, QUERY_JOB.jobId()); + assertEquals(JOB_STATUS, QUERY_JOB.status()); + assertEquals(ALLOW_LARGE_RESULTS, QUERY_JOB.allowLargeResults()); + assertEquals(CREATE_DISPOSITION, QUERY_JOB.createDisposition()); + assertEquals(DATASET_ID, QUERY_JOB.defaultDataset()); + assertEquals(TABLE_ID, QUERY_JOB.destinationTable()); + assertEquals(FLATTEN_RESULTS, QUERY_JOB.flattenResults()); + assertEquals(PRIORITY, QUERY_JOB.priority()); + assertEquals(QUERY, QUERY_JOB.query()); + assertEquals(TABLE_DEFINITIONS, QUERY_JOB.tableDefinitions()); + assertEquals(USE_QUERY_CACHE, QUERY_JOB.useQueryCache()); + assertEquals(USER_DEFINED_FUNCTIONS, QUERY_JOB.userDefinedFunctions()); + assertEquals(WRITE_DISPOSITION, QUERY_JOB.writeDisposition()); + assertTrue(QUERY_JOB.dryRun()); + assertEquals(JOB_STATISTICS, QUERY_JOB.statistics()); + } + + @Test + public void testToPbAndFromPb() { + assertNotNull(QUERY_JOB.toPb().getConfiguration().getQuery()); + assertNull(QUERY_JOB.toPb().getConfiguration().getExtract()); + assertNull(QUERY_JOB.toPb().getConfiguration().getCopy()); + 
assertNull(QUERY_JOB.toPb().getConfiguration().getLoad()); + assertEquals(JOB_STATISTICS, JobStatistics.fromPb(QUERY_JOB.statistics().toPb())); + compareQueryJobInfo(QUERY_JOB, QueryJobInfo.fromPb(QUERY_JOB.toPb())); + compareQueryJobInfo(QUERY_JOB, + (QueryJobInfo) JobInfo.fromPb(QUERY_JOB.toPb())); + QueryJobInfo job = QueryJobInfo.of(QUERY); + compareQueryJobInfo(job, QueryJobInfo.fromPb(job.toPb())); + compareQueryJobInfo(job, (QueryJobInfo) JobInfo.fromPb(job.toPb())); + } + + private void compareQueryJobInfo(QueryJobInfo expected, QueryJobInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.status(), value.status()); + assertEquals(expected.statistics(), value.statistics()); + assertEquals(expected.dryRun(), value.dryRun()); + assertEquals(expected.userEmail(), value.userEmail()); + assertEquals(expected.allowLargeResults(), value.allowLargeResults()); + assertEquals(expected.createDisposition(), value.createDisposition()); + assertEquals(expected.defaultDataset(), value.defaultDataset()); + assertEquals(expected.destinationTable(), value.destinationTable()); + assertEquals(expected.flattenResults(), value.flattenResults()); + assertEquals(expected.priority(), value.priority()); + assertEquals(expected.query(), value.query()); + assertEquals(expected.tableDefinitions(), value.tableDefinitions()); + assertEquals(expected.useQueryCache(), value.useQueryCache()); + assertEquals(expected.userDefinedFunctions(), value.userDefinedFunctions()); + assertEquals(expected.writeDisposition(), value.writeDisposition()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java new file mode 100644 index 000000000000..276e4f6792b3 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryRequestTest.java @@ -0,0 +1,103 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +public class QueryRequestTest { + + private static final String QUERY = "BigQuery SQL"; + private static final DatasetId DATASET_ID = DatasetId.of("project", "dataset"); + private static final Boolean USE_QUERY_CACHE = true; + private static final Boolean DRY_RUN = false; + private static final Long MAX_RESULTS = 42L; + private static final Long MAX_WAIT_TIME = 42000L; + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder(QUERY) + .useQueryCache(USE_QUERY_CACHE) + .defaultDataset(DATASET_ID) + .dryRun(DRY_RUN) + .maxResults(MAX_RESULTS) + .maxWaitTime(MAX_WAIT_TIME) + .build(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void testToBuilder() { + compareQueryRequest(QUERY_REQUEST, QUERY_REQUEST.toBuilder().build()); + QueryRequest queryRequest = QUERY_REQUEST.toBuilder() + .query("New BigQuery SQL") + 
.build(); + assertEquals("New BigQuery SQL", queryRequest.query()); + queryRequest = queryRequest.toBuilder().query(QUERY).build(); + compareQueryRequest(QUERY_REQUEST, queryRequest); + } + + @Test + public void testToBuilderIncomplete() { + QueryRequest queryRequest = QueryRequest.of(QUERY); + compareQueryRequest(queryRequest, queryRequest.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(QUERY, QUERY_REQUEST.query()); + assertEquals(USE_QUERY_CACHE, QUERY_REQUEST.useQueryCache()); + assertEquals(DATASET_ID, QUERY_REQUEST.defaultDataset()); + assertEquals(DRY_RUN, QUERY_REQUEST.dryRun()); + assertEquals(MAX_RESULTS, QUERY_REQUEST.maxResults()); + assertEquals(MAX_WAIT_TIME, QUERY_REQUEST.maxWaitTime()); + thrown.expect(NullPointerException.class); + QueryRequest.builder(null); + } + + @Test + public void testOf() { + QueryRequest request = QueryRequest.of(QUERY); + assertEquals(QUERY, request.query()); + assertNull(request.useQueryCache()); + assertNull(request.defaultDataset()); + assertNull(request.dryRun()); + assertNull(request.maxResults()); + assertNull(request.maxWaitTime()); + thrown.expect(NullPointerException.class); + QueryRequest.of(null); + } + + @Test + public void testToPbAndFromPb() { + compareQueryRequest(QUERY_REQUEST, QueryRequest.fromPb(QUERY_REQUEST.toPb())); + QueryRequest queryRequest = QueryRequest.of(QUERY); + compareQueryRequest(queryRequest, QueryRequest.fromPb(queryRequest.toPb())); + } + + private void compareQueryRequest(QueryRequest expected, QueryRequest value) { + assertEquals(expected, value); + assertEquals(expected.query(), value.query()); + assertEquals(expected.useQueryCache(), value.useQueryCache()); + assertEquals(expected.defaultDataset(), value.defaultDataset()); + assertEquals(expected.dryRun(), value.dryRun()); + assertEquals(expected.maxResults(), value.maxResults()); + assertEquals(expected.maxWaitTime(), value.maxWaitTime()); + } +} diff --git 
a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java new file mode 100644 index 000000000000..3ecae9b76e18 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResponseTest.java @@ -0,0 +1,107 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class QueryResponseTest { + + private static final String ETAG = "etag"; + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Schema SCHEMA = Schema.of(FIELD_SCHEMA1); + private static final JobId JOB_ID = JobId.of("project", "job"); + private static final Long TOTAL_ROWS = 42L; + private static final QueryResult.QueryResultsPageFetcher FETCHER = + new QueryResult.QueryResultsPageFetcher() { + @Override + public QueryResult nextPage() { + return null; + } + }; + private static final Long TOTAL_BYTES_PROCESSED = 4200L; + private static 
final Boolean JOB_COMPLETE = true; + private static final List ERRORS = ImmutableList.of( + new BigQueryError("reason1", "location1", "message1", "debugInfo1"), + new BigQueryError("reason2", "location2", "message2", "debugInfo2") + ); + private static final Boolean CACHE_HIT = false; + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(SCHEMA) + .totalRows(TOTAL_ROWS) + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .cursor("cursor") + .pageFetcher(FETCHER) + .results(ImmutableList.>of()) + .cacheHit(CACHE_HIT) + .build(); + private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder() + .etag(ETAG) + .jobId(JOB_ID) + .jobComplete(JOB_COMPLETE) + .executionErrors(ERRORS) + .result(QUERY_RESULT) + .build(); + + @Test + public void testBuilder() { + assertEquals(ETAG, QUERY_RESPONSE.etag()); + assertEquals(QUERY_RESULT, QUERY_RESPONSE.result()); + assertEquals(JOB_ID, QUERY_RESPONSE.jobId()); + assertEquals(JOB_COMPLETE, QUERY_RESPONSE.jobComplete()); + assertEquals(ERRORS, QUERY_RESPONSE.executionErrors()); + assertTrue(QUERY_RESPONSE.hasErrors()); + } + + @Test + public void testBuilderIncomplete() { + QueryResponse queryResponse = QueryResponse.builder().jobComplete(false).build(); + assertNull(queryResponse.etag()); + assertNull(queryResponse.result()); + assertNull(queryResponse.jobId()); + assertFalse(queryResponse.jobComplete()); + assertEquals(ImmutableList.of(), queryResponse.executionErrors()); + assertFalse(queryResponse.hasErrors()); + } + + @Test + public void testEquals() { + compareQueryResponse(QUERY_RESPONSE, QUERY_RESPONSE); + } + + private void compareQueryResponse(QueryResponse expected, QueryResponse value) { + assertEquals(expected, value); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.result(), value.result()); + assertEquals(expected.jobId(), value.jobId()); + assertEquals(expected.jobComplete(), value.jobComplete()); + assertEquals(expected.executionErrors(), 
value.executionErrors()); + assertEquals(expected.hasErrors(), value.hasErrors()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java new file mode 100644 index 000000000000..b6810ed93143 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/QueryResultTest.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class QueryResultTest { + + private static final String CURSOR = "cursor"; + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Schema SCHEMA = Schema.of(FIELD_SCHEMA1); + private static final long TOTAL_ROWS = 42L; + private static final QueryResult.QueryResultsPageFetcher FETCHER = + new QueryResult.QueryResultsPageFetcher() { + @Override + public QueryResult nextPage() { + return null; + } + }; + private static final long TOTAL_BYTES_PROCESSED = 4200L; + private static final boolean CACHE_HIT = false; + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(SCHEMA) + .totalRows(TOTAL_ROWS) + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .cursor(CURSOR) + .pageFetcher(FETCHER) + .results(ImmutableList.>of()) + .cacheHit(CACHE_HIT) + .build(); + private static final QueryResult QUERY_RESULT_INCOMPLETE = QueryResult.builder() + .totalBytesProcessed(TOTAL_BYTES_PROCESSED) + .build(); + + @Test + public void testBuilder() { + assertEquals(SCHEMA, QUERY_RESULT.schema()); + assertEquals(TOTAL_ROWS, QUERY_RESULT.totalRows()); + assertEquals(TOTAL_BYTES_PROCESSED, QUERY_RESULT.totalBytesProcessed()); + assertEquals(CACHE_HIT, QUERY_RESULT.cacheHit()); + assertEquals(CURSOR, QUERY_RESULT.nextPageCursor()); + assertEquals(null, QUERY_RESULT.nextPage()); + assertEquals(null, QUERY_RESULT_INCOMPLETE.schema()); + assertEquals(0L, QUERY_RESULT_INCOMPLETE.totalRows()); + assertEquals(TOTAL_BYTES_PROCESSED, QUERY_RESULT_INCOMPLETE.totalBytesProcessed()); + assertEquals(false, QUERY_RESULT_INCOMPLETE.cacheHit()); + assertEquals(null, QUERY_RESULT_INCOMPLETE.nextPageCursor()); + assertEquals(null, 
QUERY_RESULT_INCOMPLETE.nextPage()); + } + + @Test + public void testEquals() { + compareQueryResult(QUERY_RESULT, QUERY_RESULT); + compareQueryResult(QUERY_RESULT_INCOMPLETE, QUERY_RESULT_INCOMPLETE); + } + + private void compareQueryResult(QueryResult expected, QueryResult value) { + assertEquals(expected, value); + assertEquals(expected.nextPage(), value.nextPage()); + assertEquals(expected.nextPageCursor(), value.nextPageCursor()); + assertEquals(expected.values(), value.values()); + assertEquals(expected.schema(), value.schema()); + assertEquals(expected.totalRows(), value.totalRows()); + assertEquals(expected.totalBytesProcessed(), value.totalBytesProcessed()); + assertEquals(expected.cacheHit(), value.cacheHit()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java new file mode 100644 index 000000000000..62a88c1860cd --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java @@ -0,0 +1,91 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.gcloud.bigquery.BigQuery.DatasetDeleteOption; +import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper; + +import org.easymock.EasyMock; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.concurrent.ExecutionException; + +public class RemoteBigQueryHelperTest { + + private static final String DATASET_NAME = "dataset-name"; + private static final String PROJECT_ID = "project-id"; + private static final String JSON_KEY = "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + 
"CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\"\n" + + "}"; + private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + @Test + public void testForceDelete() throws InterruptedException, ExecutionException { + BigQuery bigqueryMock = EasyMock.createMock(BigQuery.class); + EasyMock.expect(bigqueryMock.delete(DATASET_NAME, DatasetDeleteOption.deleteContents())) + .andReturn(true); + EasyMock.replay(bigqueryMock); + assertTrue(RemoteBigQueryHelper.forceDelete(bigqueryMock, DATASET_NAME)); + EasyMock.verify(bigqueryMock); + } + + @Test + public void testCreateFromStream() { + RemoteBigQueryHelper helper = RemoteBigQueryHelper.create(PROJECT_ID, JSON_KEY_STREAM); + BigQueryOptions options = helper.options(); + assertEquals(PROJECT_ID, options.projectId()); + assertEquals(60000, options.connectTimeout()); + assertEquals(60000, options.readTimeout()); + assertEquals(10, options.retryParams().retryMaxAttempts()); + assertEquals(6, options.retryParams().retryMinAttempts()); + 
assertEquals(30000, options.retryParams().maxRetryDelayMillis()); + assertEquals(120000, options.retryParams().totalRetryPeriodMillis()); + assertEquals(250, options.retryParams().initialRetryDelayMillis()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java new file mode 100644 index 000000000000..d24268d2e7cd --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SchemaTest.java @@ -0,0 +1,77 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; + +import org.junit.Test; + +import java.util.List; + +public class SchemaTest { + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final List FIELDS = ImmutableList.of(FIELD_SCHEMA1, FIELD_SCHEMA2, + FIELD_SCHEMA3); + private static final Schema TABLE_SCHEMA = Schema.builder().fields(FIELDS).build(); + + @Test + public void testToBuilder() { + compareTableSchema(TABLE_SCHEMA, TABLE_SCHEMA.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FIELDS, TABLE_SCHEMA.fields()); + Schema schema = TABLE_SCHEMA.toBuilder() + .fields(FIELD_SCHEMA1, FIELD_SCHEMA2) + .addField(FIELD_SCHEMA3) + .build(); + compareTableSchema(TABLE_SCHEMA, schema); + } + + @Test + public void testOf() { + compareTableSchema(TABLE_SCHEMA, Schema.of(FIELDS)); + } + + @Test + public void testToAndFromPb() { + compareTableSchema(TABLE_SCHEMA, Schema.fromPb(TABLE_SCHEMA.toPb())); + } + + private void compareTableSchema(Schema expected, Schema value) { + assertEquals(expected, value); + assertEquals(expected.fields(), value.fields()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java new file mode 100644 index 000000000000..8c80bddbfefb --- /dev/null +++ 
b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/SerializationTest.java @@ -0,0 +1,261 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotSame; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.gcloud.AuthCredentials; +import com.google.gcloud.RetryParams; +import com.google.gcloud.bigquery.TableInfo.StreamingBuffer; + +import org.junit.Test; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Map; + +public class SerializationTest { + + private static final Acl DOMAIN_ACCESS = + new Acl(new Acl.Domain("domain"), Acl.Role.WRITER); + private static final Acl GROUP_ACCESS = + new Acl(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER); + private static final Acl USER_ACCESS = new Acl(new Acl.User("user"), Acl.Role.OWNER); + private static final Acl VIEW_ACCESS = + new Acl(new Acl.View(TableId.of("project", "dataset", "table")), Acl.Role.WRITER); + private static final List ACCESS_RULES = ImmutableList.of(DOMAIN_ACCESS, GROUP_ACCESS, + VIEW_ACCESS, 
USER_ACCESS); + private static final Long CREATION_TIME = System.currentTimeMillis() - 10; + private static final Long DEFAULT_TABLE_EXPIRATION = 100L; + private static final String DESCRIPTION = "Description"; + private static final String ETAG = "0xFF00"; + private static final String FRIENDLY_NAME = "friendlyDataset"; + private static final String ID = "P/D:1"; + private static final Long LAST_MODIFIED = CREATION_TIME + 50; + private static final String LOCATION = ""; + private static final String SELF_LINK = "http://bigquery/p/d"; + private static final DatasetId DATASET_ID = DatasetId.of("project", "dataset"); + private static final DatasetInfo DATASET_INFO = DatasetInfo.builder(DATASET_ID) + .acl(ACCESS_RULES) + .creationTime(CREATION_TIME) + .defaultTableLifetime(DEFAULT_TABLE_EXPIRATION) + .description(DESCRIPTION) + .etag(ETAG) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModified(LAST_MODIFIED) + .location(LOCATION) + .selfLink(SELF_LINK) + .build(); + private static final TableId TABLE_ID = TableId.of("project", "dataset", "table"); + private static final CsvOptions CSV_OPTIONS = CsvOptions.builder() + .allowJaggedRows(true) + .allowQuotedNewLines(false) + .encoding(StandardCharsets.ISO_8859_1) + .fieldDelimiter(",") + .quote("\"") + .skipLeadingRows(42) + .build(); + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final StreamingBuffer 
STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final ExternalDataConfiguration EXTERNAL_DATA_CONFIGURATION = + ExternalDataConfiguration.builder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .ignoreUnknownValues(true) + .maxBadRecords(42) + .build(); + private static final UserDefinedFunction INLINE_FUNCTION = + new UserDefinedFunction.InlineFunction("inline"); + private static final UserDefinedFunction URI_FUNCTION = + new UserDefinedFunction.UriFunction("URI"); + private static final BaseTableInfo TABLE_INFO = + TableInfo.builder(TABLE_ID, TABLE_SCHEMA) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .location(LOCATION) + .streamingBuffer(STREAMING_BUFFER) + .build(); + private static final ViewInfo VIEW_INFO = + ViewInfo.builder(TABLE_ID, "QUERY") + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final ExternalTableInfo EXTERNAL_TABLE_INFO = + ExternalTableInfo.builder(TABLE_ID, EXTERNAL_DATA_CONFIGURATION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .id(ID) + .build(); + private static final JobStatistics JOB_STATISTICS = JobStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .build(); + private static final JobStatistics.ExtractStatistics EXTRACT_STATISTICS = + JobStatistics.ExtractStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .destinationUriFileCounts(ImmutableList.of(42L)) + .build(); + private static final JobStatistics.LoadStatistics LOAD_STATISTICS = + JobStatistics.LoadStatistics.builder() + .creationTime(1L) + .endTime(3L) + .startTime(2L) + .inputFiles(42L) + .outputBytes(1024L) + .inputBytes(2048L) + .outputRows(24L) + .build(); + private static final JobStatistics.QueryStatistics QUERY_STATISTICS = + JobStatistics.QueryStatistics.builder() + .creationTime(1L) + .endTime(3L) + 
.startTime(2L) + .totalBytesProcessed(2048L) + .totalBytesBilled(1024L) + .cacheHit(false) + .billingTier(42) + .build(); + private static final BigQueryError BIGQUERY_ERROR = + new BigQueryError("reason", "location", "message", "debugInfo"); + private static final JobStatus JOB_STATUS = new JobStatus(JobStatus.State.DONE, BIGQUERY_ERROR, + ImmutableList.of(BIGQUERY_ERROR)); + private static final JobId JOB_ID = JobId.of("project", "job"); + private static final CopyJobInfo COPY_JOB = CopyJobInfo.of(TABLE_ID, TABLE_ID); + private static final ExtractJobInfo EXTRACT_JOB = ExtractJobInfo.of(TABLE_ID, SOURCE_URIS); + private static final LoadJobInfo LOAD_JOB = LoadJobInfo.of(TABLE_ID, SOURCE_URIS); + private static final QueryJobInfo QUERY_JOB = QueryJobInfo.of("query"); + private static final Map CONTENT1 = + ImmutableMap.of("key", "val1"); + private static final Map CONTENT2 = + ImmutableMap.of("key", "val2"); + private static final InsertAllRequest INSERT_ALL_REQUEST = InsertAllRequest.builder(TABLE_ID) + .addRow(CONTENT1) + .addRow(CONTENT2) + .ignoreUnknownValues(true) + .skipInvalidRows(false) + .build(); + private static final Map> ERRORS_MAP = + ImmutableMap.>of(0L, ImmutableList.of(BIGQUERY_ERROR)); + private static final InsertAllResponse INSERT_ALL_RESPONSE = new InsertAllResponse(ERRORS_MAP); + private static final FieldValue FIELD_VALUE = + new FieldValue(FieldValue.Attribute.PRIMITIVE, "value"); + private static final QueryRequest QUERY_REQUEST = QueryRequest.builder("query") + .useQueryCache(true) + .defaultDataset(DATASET_ID) + .dryRun(false) + .maxResults(42L) + .maxWaitTime(10L) + .build(); + private static final QueryResult QUERY_RESULT = QueryResult.builder() + .schema(TABLE_SCHEMA) + .totalRows(1L) + .totalBytesProcessed(42L) + .cursor("cursor") + .pageFetcher(null) + .results(ImmutableList.>of()) + .build(); + private static final QueryResponse QUERY_RESPONSE = QueryResponse.builder() + .etag(ETAG) + .jobId(JOB_ID) + .jobComplete(true) + 
.result(QUERY_RESULT) + .build(); + + @Test + public void testServiceOptions() throws Exception { + BigQueryOptions options = BigQueryOptions.builder() + .projectId("p1") + .authCredentials(AuthCredentials.createForAppEngine()) + .build(); + BigQueryOptions serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + + options = options.toBuilder() + .projectId("p2") + .retryParams(RetryParams.defaultInstance()) + .authCredentials(null) + .build(); + serializedCopy = serializeAndDeserialize(options); + assertEquals(options, serializedCopy); + } + + @Test + public void testModelAndRequests() throws Exception { + Serializable[] objects = {DOMAIN_ACCESS, GROUP_ACCESS, USER_ACCESS, VIEW_ACCESS, DATASET_ID, + DATASET_INFO, TABLE_ID, CSV_OPTIONS, STREAMING_BUFFER, EXTERNAL_DATA_CONFIGURATION, + TABLE_SCHEMA, TABLE_INFO, VIEW_INFO, EXTERNAL_TABLE_INFO, INLINE_FUNCTION, URI_FUNCTION, + JOB_STATISTICS, EXTRACT_STATISTICS, LOAD_STATISTICS, QUERY_STATISTICS, BIGQUERY_ERROR, + JOB_STATUS, JOB_ID, COPY_JOB, EXTRACT_JOB, LOAD_JOB, QUERY_JOB, INSERT_ALL_REQUEST, + INSERT_ALL_RESPONSE, FIELD_VALUE, QUERY_REQUEST, QUERY_RESPONSE, + BigQuery.DatasetOption.fields(), BigQuery.DatasetDeleteOption.deleteContents(), + BigQuery.DatasetListOption.all(), BigQuery.TableOption.fields(), + BigQuery.TableListOption.maxResults(42L), BigQuery.JobOption.fields(), + BigQuery.JobListOption.allUsers()}; + for (Serializable obj : objects) { + Object copy = serializeAndDeserialize(obj); + assertEquals(obj, obj); + assertEquals(obj, copy); + assertNotSame(obj, copy); + assertEquals(copy, copy); + } + } + + @SuppressWarnings("unchecked") + private T serializeAndDeserialize(T obj) + throws IOException, ClassNotFoundException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + try (ObjectOutputStream output = new ObjectOutputStream(bytes)) { + output.writeObject(obj); + } + try (ObjectInputStream input = + new ObjectInputStream(new 
ByteArrayInputStream(bytes.toByteArray()))) { + return (T) input.readObject(); + } + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java new file mode 100644 index 000000000000..9da050bf5951 --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableIdTest.java @@ -0,0 +1,56 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class TableIdTest { + + private static final TableId TABLE = TableId.of("dataset", "table"); + private static final TableId TABLE_COMPLETE = TableId.of("project", "dataset", "table"); + + @Test + public void testOf() { + assertEquals(null, TABLE.project()); + assertEquals("dataset", TABLE.dataset()); + assertEquals("table", TABLE.table()); + assertEquals("project", TABLE_COMPLETE.project()); + assertEquals("dataset", TABLE_COMPLETE.dataset()); + assertEquals("table", TABLE_COMPLETE.table()); + } + + @Test + public void testEquals() { + compareTableIds(TABLE, TableId.of("dataset", "table")); + compareTableIds(TABLE_COMPLETE, TableId.of("project", "dataset", "table")); + } + + @Test + public void testToPbAndFromPb() { + compareTableIds(TABLE, TableId.fromPb(TABLE.toPb())); + compareTableIds(TABLE_COMPLETE, TableId.fromPb(TABLE_COMPLETE.toPb())); + } + + private void compareTableIds(TableId expected, TableId value) { + assertEquals(expected, value); + assertEquals(expected.project(), value.project()); + assertEquals(expected.dataset(), value.dataset()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java new file mode 100644 index 000000000000..c636a31ad1ff --- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/TableInfoTest.java @@ -0,0 +1,236 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableList; +import com.google.gcloud.bigquery.TableInfo.StreamingBuffer; + +import org.junit.Test; + +import java.util.List; + +public class TableInfoTest { + + private static final Field FIELD_SCHEMA1 = + Field.builder("StringField", Field.Type.string()) + .mode(Field.Mode.NULLABLE) + .description("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.builder("IntegerField", Field.Type.integer()) + .mode(Field.Mode.REPEATED) + .description("FieldDescription2") + .build(); + private static final Field FIELD_SCHEMA3 = + Field.builder("RecordField", Field.Type.record(FIELD_SCHEMA1, FIELD_SCHEMA2)) + .mode(Field.Mode.REQUIRED) + .description("FieldDescription3") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); + private static final String VIEW_QUERY = "VIEW QUERY"; + private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final String COMPRESSION = "GZIP"; + private static final ExternalDataConfiguration CONFIGURATION = ExternalDataConfiguration + .builder(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.datastoreBackup()) + .compression(COMPRESSION) + .ignoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .maxBadRecords(MAX_BAD_RECORDS) + .build(); + 
private static final String ETAG = "etag"; + private static final String ID = "project:dataset:table"; + private static final String SELF_LINK = "selfLink"; + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final String FRIENDLY_NAME = "friendlyName"; + private static final String DESCRIPTION = "description"; + private static final Long NUM_BYTES = 42L; + private static final Long NUM_ROWS = 43L; + private static final Long CREATION_TIME = 10L; + private static final Long EXPIRATION_TIME = 100L; + private static final Long LAST_MODIFIED_TIME = 20L; + private static final String LOCATION = "US"; + private static final StreamingBuffer STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); + private static final TableInfo TABLE_INFO = + TableInfo.builder(TABLE_ID, TABLE_SCHEMA) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .location(LOCATION) + .numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .selfLink(SELF_LINK) + .streamingBuffer(STREAMING_BUFFER) + .build(); + private static final ExternalTableInfo EXTERNAL_TABLE_INFO = + ExternalTableInfo.builder(TABLE_ID, CONFIGURATION) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + .numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .selfLink(SELF_LINK) + .build(); + private static final List USER_DEFINED_FUNCTIONS = + ImmutableList.of(UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final ViewInfo VIEW_INFO = + ViewInfo.builder(TABLE_ID, VIEW_QUERY, USER_DEFINED_FUNCTIONS) + .creationTime(CREATION_TIME) + .description(DESCRIPTION) + .etag(ETAG) + .expirationTime(EXPIRATION_TIME) + .friendlyName(FRIENDLY_NAME) + .id(ID) + .lastModifiedTime(LAST_MODIFIED_TIME) + 
.numBytes(NUM_BYTES) + .numRows(NUM_ROWS) + .selfLink(SELF_LINK) + .build(); + + @Test + public void testToBuilder() { + compareTableInfo(TABLE_INFO, TABLE_INFO.toBuilder().build()); + compareViewInfo(VIEW_INFO, VIEW_INFO.toBuilder().build()); + compareExternalTableInfo(EXTERNAL_TABLE_INFO, EXTERNAL_TABLE_INFO.toBuilder().build()); + BaseTableInfo tableInfo = TABLE_INFO.toBuilder() + .description("newDescription") + .build(); + assertEquals("newDescription", tableInfo.description()); + tableInfo = tableInfo.toBuilder() + .description("description") + .build(); + compareBaseTableInfo(TABLE_INFO, tableInfo); + } + + @Test + public void testToBuilderIncomplete() { + BaseTableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_SCHEMA); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + tableInfo = ViewInfo.of(TABLE_ID, VIEW_QUERY); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + tableInfo = ExternalTableInfo.of(TABLE_ID, CONFIGURATION); + assertEquals(tableInfo, tableInfo.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(TABLE_ID, TABLE_INFO.tableId()); + assertEquals(TABLE_SCHEMA, TABLE_INFO.schema()); + assertEquals(CREATION_TIME, TABLE_INFO.creationTime()); + assertEquals(DESCRIPTION, TABLE_INFO.description()); + assertEquals(ETAG, TABLE_INFO.etag()); + assertEquals(EXPIRATION_TIME, TABLE_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, TABLE_INFO.friendlyName()); + assertEquals(ID, TABLE_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, TABLE_INFO.lastModifiedTime()); + assertEquals(LOCATION, TABLE_INFO.location()); + assertEquals(NUM_BYTES, TABLE_INFO.numBytes()); + assertEquals(NUM_ROWS, TABLE_INFO.numRows()); + assertEquals(SELF_LINK, TABLE_INFO.selfLink()); + assertEquals(STREAMING_BUFFER, TABLE_INFO.streamingBuffer()); + assertEquals(BaseTableInfo.Type.TABLE, TABLE_INFO.type()); + assertEquals(TABLE_ID, VIEW_INFO.tableId()); + assertEquals(null, VIEW_INFO.schema()); + assertEquals(VIEW_QUERY, VIEW_INFO.query()); 
+ assertEquals(BaseTableInfo.Type.VIEW, VIEW_INFO.type()); + assertEquals(CREATION_TIME, VIEW_INFO.creationTime()); + assertEquals(DESCRIPTION, VIEW_INFO.description()); + assertEquals(ETAG, VIEW_INFO.etag()); + assertEquals(EXPIRATION_TIME, VIEW_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, VIEW_INFO.friendlyName()); + assertEquals(ID, VIEW_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, VIEW_INFO.lastModifiedTime()); + assertEquals(NUM_BYTES, VIEW_INFO.numBytes()); + assertEquals(NUM_ROWS, VIEW_INFO.numRows()); + assertEquals(SELF_LINK, VIEW_INFO.selfLink()); + assertEquals(BaseTableInfo.Type.VIEW, VIEW_INFO.type()); + assertEquals(TABLE_ID, EXTERNAL_TABLE_INFO.tableId()); + assertEquals(null, EXTERNAL_TABLE_INFO.schema()); + assertEquals(CONFIGURATION, EXTERNAL_TABLE_INFO.configuration()); + assertEquals(CREATION_TIME, EXTERNAL_TABLE_INFO.creationTime()); + assertEquals(DESCRIPTION, EXTERNAL_TABLE_INFO.description()); + assertEquals(ETAG, EXTERNAL_TABLE_INFO.etag()); + assertEquals(EXPIRATION_TIME, EXTERNAL_TABLE_INFO.expirationTime()); + assertEquals(FRIENDLY_NAME, EXTERNAL_TABLE_INFO.friendlyName()); + assertEquals(ID, EXTERNAL_TABLE_INFO.id()); + assertEquals(LAST_MODIFIED_TIME, EXTERNAL_TABLE_INFO.lastModifiedTime()); + assertEquals(NUM_BYTES, EXTERNAL_TABLE_INFO.numBytes()); + assertEquals(NUM_ROWS, EXTERNAL_TABLE_INFO.numRows()); + assertEquals(SELF_LINK, EXTERNAL_TABLE_INFO.selfLink()); + assertEquals(BaseTableInfo.Type.EXTERNAL, EXTERNAL_TABLE_INFO.type()); + } + + @Test + public void testToAndFromPb() { + assertTrue(BaseTableInfo.fromPb(TABLE_INFO.toPb()) instanceof TableInfo); + compareTableInfo(TABLE_INFO, BaseTableInfo.fromPb(TABLE_INFO.toPb())); + assertTrue(BaseTableInfo.fromPb(VIEW_INFO.toPb()) instanceof ViewInfo); + compareViewInfo(VIEW_INFO, BaseTableInfo.fromPb(VIEW_INFO.toPb())); + assertTrue(BaseTableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb()) instanceof ExternalTableInfo); + compareExternalTableInfo(EXTERNAL_TABLE_INFO, + 
BaseTableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb())); + } + + private void compareBaseTableInfo(BaseTableInfo expected, BaseTableInfo value) { + assertEquals(expected, value); + assertEquals(expected.tableId(), value.tableId()); + assertEquals(expected.schema(), value.schema()); + assertEquals(expected.type(), value.type()); + assertEquals(expected.creationTime(), value.creationTime()); + assertEquals(expected.description(), value.description()); + assertEquals(expected.etag(), value.etag()); + assertEquals(expected.expirationTime(), value.expirationTime()); + assertEquals(expected.friendlyName(), value.friendlyName()); + assertEquals(expected.id(), value.id()); + assertEquals(expected.lastModifiedTime(), value.lastModifiedTime()); + assertEquals(expected.numBytes(), value.numBytes()); + assertEquals(expected.numRows(), value.numRows()); + assertEquals(expected.selfLink(), value.selfLink()); + assertEquals(expected.type(), value.type()); + } + + private void compareTableInfo(TableInfo expected, TableInfo value) { + compareBaseTableInfo(expected, value); + assertEquals(expected, value); + assertEquals(expected.location(), value.location()); + assertEquals(expected.streamingBuffer(), value.streamingBuffer()); + } + + private void compareViewInfo(ViewInfo expected, ViewInfo value) { + compareBaseTableInfo(expected, value); + assertEquals(expected, value); + assertEquals(expected.query(), value.query()); + assertEquals(expected.userDefinedFunctions(), value.userDefinedFunctions()); + } + + private void compareExternalTableInfo(ExternalTableInfo expected, ExternalTableInfo value) { + compareBaseTableInfo(expected, value); + assertEquals(expected, value); + assertEquals(expected.configuration(), value.configuration()); + } +} diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java new file mode 100644 index 000000000000..2741aaed89a5 
--- /dev/null +++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/UserDefinedFunctionTest.java @@ -0,0 +1,56 @@ +/* + * Copyright 2015 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.gcloud.bigquery; + +import static org.junit.Assert.assertEquals; + +import org.junit.Test; + +public class UserDefinedFunctionTest { + + private static final String INLINE = "inline"; + private static final String URI = "uri"; + private static final UserDefinedFunction INLINE_FUNCTION = + new UserDefinedFunction.InlineFunction(INLINE); + private static final UserDefinedFunction URI_FUNCTION = new UserDefinedFunction.UriFunction(URI); + + @Test + public void testConstructor() { + assertEquals(INLINE, INLINE_FUNCTION.content()); + assertEquals(UserDefinedFunction.Type.INLINE, INLINE_FUNCTION.type()); + assertEquals(URI, URI_FUNCTION.content()); + assertEquals(UserDefinedFunction.Type.FROM_URI, URI_FUNCTION.type()); + } + + @Test + public void testFactoryMethod() { + compareUserDefinedFunction(INLINE_FUNCTION, UserDefinedFunction.inline(INLINE)); + compareUserDefinedFunction(URI_FUNCTION, UserDefinedFunction.fromUri(URI)); + } + + @Test + public void testToAndFromPb() { + compareUserDefinedFunction(INLINE_FUNCTION, UserDefinedFunction.fromPb(INLINE_FUNCTION.toPb())); + compareUserDefinedFunction(URI_FUNCTION, UserDefinedFunction.fromPb(URI_FUNCTION.toPb())); + } + + private void 
compareUserDefinedFunction(UserDefinedFunction expected, UserDefinedFunction value) { + assertEquals(expected, value); + assertEquals(expected.type(), value.type()); + assertEquals(expected.content(), value.content()); + } +} diff --git a/pom.xml b/pom.xml index 7d1751ee179d..d6c043cb23b4 100644 --- a/pom.xml +++ b/pom.xml @@ -72,6 +72,7 @@ gcloud-java-storage gcloud-java gcloud-java-examples + gcloud-java-bigquery