From 86f5e1415fd3ffb99c51eefb2b29f431e8199b14 Mon Sep 17 00:00:00 2001 From: Daniel Tang Date: Fri, 16 Sep 2016 11:09:19 -0700 Subject: [PATCH 1/2] Add snippets to BigQuery Table class and tests --- .../java/com/google/cloud/bigquery/Table.java | 200 ++++++++++- .../bigquery/snippets/TableSnippets.java | 327 ++++++++++++++++++ .../bigquery/snippets/ITTableSnippets.java | 325 +++++++++++++++++ 3 files changed, 851 insertions(+), 1 deletion(-) create mode 100644 google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java create mode 100644 google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java index d76692635508..b64fd025f2ad 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java @@ -139,6 +139,13 @@ public Table build() { /** * Checks if this table exists. * + *

Example of ensuring that a table exists. + *

 {@code
+   * if (!table.exists()) {
+   *   throw new RuntimeException("Table does not exist.");
+   * }
+   * }
+ * * @return {@code true} if this table exists, {@code false} otherwise * @throws BigQueryException upon failure */ @@ -149,6 +156,13 @@ public boolean exists() { /** * Fetches current table's latest information. Returns {@code null} if the table does not exist. * + *

Example of fetching a table's latest information, specifying particular table field options. + *

 {@code
+   * TableField field1 = TableField.LAST_MODIFIED_TIME;
+   * TableField field2 = TableField.NUM_ROWS;
+   * Table reloaded = table.reload(TableOption.fields(field1, field2));
+   * }
+ * * @param options table options * @return a {@code Table} object with latest information or {@code null} if not found * @throws BigQueryException upon failure @@ -161,6 +175,13 @@ public Table reload(TableOption... options) { * Updates the table's information with this table's information. Dataset's and table's * user-defined ids cannot be changed. A new {@code Table} object is returned. * + *

Example of updating a table's information, specifying particular table field options. + *

 {@code
+   * TableField field1 = TableField.LAST_MODIFIED_TIME;
+   * TableField field2 = TableField.NUM_ROWS;
+   * Table updated = table.update(TableOption.fields(field1, field2));
+   * }
+ * * @param options dataset options * @return a {@code Table} object with updated information * @throws BigQueryException upon failure @@ -172,6 +193,11 @@ public Table update(TableOption... options) { /** * Deletes this table. * + *

Example of deleting a table. + *

 {@code
+   * table.delete();
+   * }
+ * * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ @@ -182,6 +208,23 @@ public boolean delete() { /** * Insert rows into the table. * + *

Example of inserting rows into a table. + *

 {@code
+   * String rowId1 = "rowId1";
+   * String rowId2 = "rowId2";
+   * List rows = new ArrayList<>();
+   * Map row1 = new HashMap<>();
+   * row1.put("stringField", "value1");
+   * row1.put("booleanField", true);
+   * Map row2 = new HashMap<>();
+   * row2.put("stringField", "value2");
+   * row2.put("booleanField", false);
+   * rows.add(RowToInsert.of(rowId1, row1));
+   * rows.add(RowToInsert.of(rowId2, row2));
+   * InsertAllResponse response = table.insert(rows);
+   * // do something with response
+   * }
+ * * @param rows rows to be inserted * @throws BigQueryException upon failure */ @@ -193,6 +236,23 @@ public InsertAllResponse insert(Iterable rows) /** * Insert rows into the table. * + *

 Example of inserting rows into a table, ignoring invalid rows. + *

 {@code
+   * String rowId1 = "rowId1";
+   * String rowId2 = "rowId2";
+   * List rows = new ArrayList<>();
+   * Map row1 = new HashMap<>();
+   * row1.put("stringField", 1);
+   * row1.put("booleanField", true);
+   * Map row2 = new HashMap<>();
+   * row2.put("stringField", "value2");
+   * row2.put("booleanField", false);
+   * rows.add(RowToInsert.of(rowId1, row1));
+   * rows.add(RowToInsert.of(rowId2, row2));
+   * InsertAllResponse response = table.insert(rows, true, true);
+   * // do something with response
+   * }
+ * * @param rows rows to be inserted * @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set * the entire insert operation will fail if rows to be inserted contain an invalid row @@ -202,7 +262,7 @@ public InsertAllResponse insert(Iterable rows) * @throws BigQueryException upon failure */ public InsertAllResponse insert(Iterable rows, - boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException { + boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException { InsertAllRequest request = InsertAllRequest.builder(tableId(), rows) .skipInvalidRows(skipInvalidRows) .ignoreUnknownValues(ignoreUnknownValues) @@ -213,6 +273,12 @@ public InsertAllResponse insert(Iterable rows, /** * Returns the paginated list rows in this table. * + *

Example of getting a paginated list of rows in a table. + *

 {@code
+   * Page> page = table.list(TableDataListOption.pageSize(100));
+   * // do something with page
+   * }
+ * * @param options table data list options * @throws BigQueryException upon failure */ @@ -225,6 +291,27 @@ public Page> list(TableDataListOption... options) * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the * started {@link Job} object. * + *

Example of copying a table to a destination table and dataset referenced by name. + *

 {@code
+   * String datasetName = "my_dataset";
+   * String tableName = "my_destination_table";
+   * Job job = table.copy(datasetName, tableName);
+   *
+   * // Wait for the job to complete.
+   * try {
+   *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *   if (completedJob != null && completedJob.status().error() == null) {
+   *     // Job completed successfully.
+   *   } else {
+   *     // Handle error case.
+   *   }
+   * } catch (InterruptedException | TimeoutException e) {
+   *   // Handle interrupted wait.
+   * }
+   *
+   * }
+ * * @param destinationDataset the user-defined id of the destination dataset * @param destinationTable the user-defined id of the destination table * @param options job options @@ -239,6 +326,29 @@ public Job copy(String destinationDataset, String destinationTable, JobOption... * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the * started {@link Job} object. * + *

Example copying a table to a destination table referenced by table ID. + *

 {@code
+   * String dataset = "my_dataset";
+   * String tableName = "copy_destination";
+   * TableId destinationId = TableId.of(dataset, tableName);
+   * JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
+   *
+   * Job job = table.copy(destinationId, options);
+   *
+   * // Wait for the job to complete.
+   * try {
+   *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *   if (completedJob != null && completedJob.status().error() == null) {
+   *     // Job completed successfully.
+   *   } else {
+   *     // Handle error case.
+   *   }
+   * } catch (InterruptedException | TimeoutException e) {
+   *   // Handle interrupted wait.
+   * }
+   * }
+ * * @param destinationTable the destination table of the copy job * @param options job options * @throws BigQueryException upon failure @@ -253,6 +363,26 @@ public Job copy(TableId destinationTable, JobOption... options) * Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the * started {@link Job} object. * + *

 Example extracting data to a single Google Cloud Storage file. + *

 {@code
+   * String format = "CSV";
+   * String gcsUrl = "gs://myapp.appspot.com/filename.csv";
+   * Job job = table.extract(format, gcsUrl);
+   *
+   * // Wait for the job to complete.
+   * try {
+   *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *   if (completedJob != null && completedJob.status().error() == null) {
+   *     // Job completed successfully.
+   *   } else {
+   *     // Handle error case.
+   *   }
+   * } catch (InterruptedException | TimeoutException e) {
+   *   // Handle interrupted wait.
+   * }
+   * }
+ * * @param format the format of the extracted data * @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) * where the extracted table should be written @@ -268,6 +398,31 @@ public Job extract(String format, String destinationUri, JobOption... options) * Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns * the started {@link Job} object. * + *

Example extracting data to a list of Google Cloud Storage files. + *

 {@code
+   * String format = "CSV";
+   * String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_*.csv";
+   * String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_*.csv";
+   * List destinationUris = new ArrayList<>();
+   * destinationUris.add(gcsUrl1);
+   * destinationUris.add(gcsUrl2);
+   *
+   * Job job = table.extract(format, destinationUris);
+   *
+   * // Wait for the job to complete.
+   * try {
+   *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *   if (completedJob != null && completedJob.status().error() == null) {
+   *     // Job completed successfully.
+   *   } else {
+   *     // Handle error case.
+   *   }
+   * } catch (InterruptedException | TimeoutException e) {
+   *   // Handle interrupted wait.
+   * }
+   * }
+ * * @param format the format of the exported data * @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) * where the extracted table should be written @@ -285,6 +440,25 @@ public Job extract(String format, List destinationUris, JobOption... opt * Starts a BigQuery Job to load data into the current table from the provided source URI. Returns * the started {@link Job} object. * + *

Example loading data from a single Google Cloud Storage file. + *

 {@code
+   * String sourceUri = "gs://myapp.appspot.com/filename.csv";
+   * Job job = table.load(FormatOptions.csv(), sourceUri);
+   *
+   * // Wait for the job to complete.
+   * try {
+   *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *   if (completedJob != null && completedJob.status().error() == null) {
+   *     // Job completed successfully.
+   *   } else {
+   *     // Handle error case.
+   *   }
+   * } catch (InterruptedException | TimeoutException e) {
+   *   // Handle interrupted wait.
+   * }
+   * }
+ * * @param format the format of the data to load * @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from * which to load the data @@ -300,6 +474,30 @@ public Job load(FormatOptions format, String sourceUri, JobOption... options) * Starts a BigQuery Job to load data into the current table from the provided source URIs. * Returns the started {@link Job} object. * + *

Example loading data from a list of Google Cloud Storage files. + *

 {@code
+   * String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_000000000000.csv";
+   * String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_000000000000.csv";
+   * List sourceUris = new ArrayList<>();
+   * sourceUris.add(gcsUrl1);
+   * sourceUris.add(gcsUrl2);
+   *
+   * Job job = table.load(FormatOptions.csv(), sourceUris);
+   *
+   * // Wait for the job to complete.
+   * try {
+   *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *   if (completedJob != null && completedJob.status().error() == null) {
+   *     // Job completed successfully.
+   *   } else {
+   *     // Handle error case.
+   *   }
+   * } catch (InterruptedException | TimeoutException e) {
+   *   // Handle interrupted wait.
+   * }
+   * }
+ * * @param format the format of the exported data * @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from * which to load the data diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java new file mode 100644 index 000000000000..8f9e0d46bc91 --- /dev/null +++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java @@ -0,0 +1,327 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.examples.bigquery.snippets; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import com.google.cloud.Page; +import com.google.cloud.WaitForOption; +import com.google.cloud.bigquery.BigQuery.JobField; +import com.google.cloud.bigquery.BigQuery.JobOption; +import com.google.cloud.bigquery.BigQuery.TableDataListOption; +import com.google.cloud.bigquery.BigQuery.TableField; +import com.google.cloud.bigquery.BigQuery.TableOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FormatOptions; +import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; +import com.google.cloud.bigquery.InsertAllResponse; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableId; + +/* + * EDITING INSTRUCTIONS + * This file is referenced in Table’s javadoc. Any change to this file should be reflected in + * Table’s javadoc. + */ +public class TableSnippets { + private final Table table; + + public TableSnippets(Table table) { + this.table = table; + } + + /** + * Example of ensuring that a table exists. + */ + // [TARGET exists()] + public void checkExists() { + // [START checkExists] + if (!table.exists()) { + throw new IllegalArgumentException("Table does not exist."); + } + // [END checkExists] + } + + /** + * Example of fetching a table's latest information, specifying particular table field options. 
+ */ + // [TARGET reload(TableOption...)] + // [VARIABLE TableField.LAST_MODIFIED_TIME] + // [VARIABLE TableField.NUM_ROWS] + public Table reloadTableWithFields(TableField field1, TableField field2) { + // [START reloadTableWithFields] + Table reloaded = table.reload(TableOption.fields(field1, field2)); + // [END reloadTableWithFields] + return reloaded; + } + + /** + * Example of updating a table's information, specifying particular table field options. + */ + // [TARGET update(TableOption...)] + // [VARIABLE TableField.LAST_MODIFIED_TIME] + // [VARIABLE TableField.NUM_ROWS] + public Table updateTableWithFields(TableField field1, TableField field2) { + // [START updateTableWithFields] + Table updated = table.update(TableOption.fields(field1, field2)); + // [END updateTableWithFields] + return updated; + } + + /** + * Example of deleting a table. + */ + // [TARGET delete()] + public void delete() { + // [START delete] + table.delete(); + // [END delete] + } + + /** + * Example of inserting rows into a table. + */ + // [TARGET insert(Iterable)] + // [VARIABLE "rowId1"] + // [VARIABLE "rowId2"] + public InsertAllResponse insert(String rowId1, String rowId2) { + // [START insert] + List rows = new ArrayList<>(); + Map row1 = new HashMap<>(); + row1.put("stringField", "value1"); + row1.put("booleanField", true); + Map row2 = new HashMap<>(); + row2.put("stringField", "value2"); + row2.put("booleanField", false); + rows.add(RowToInsert.of(rowId1, row1)); + rows.add(RowToInsert.of(rowId2, row2)); + InsertAllResponse response = table.insert(rows); + // do something with response + // [END insert] + return response; + } + + /** + * Example of inserting rows into a table which ignores invalid rows. 
+ */ + // [TARGET insert(Iterable, boolean, boolean)] + // [VARIABLE "rowId1"] + // [VARIABLE "rowId2"] + public InsertAllResponse insertWithParams(String rowId1, String rowId2) { + // [START insertWithParams] + List rows = new ArrayList<>(); + Map row1 = new HashMap<>(); + row1.put("stringField", 1); + row1.put("booleanField", true); + Map row2 = new HashMap<>(); + row2.put("stringField", "value2"); + row2.put("booleanField", false); + rows.add(RowToInsert.of(rowId1, row1)); + rows.add(RowToInsert.of(rowId2, row2)); + InsertAllResponse response = table.insert(rows, true, true); + // do something with response + // [END insertWithParams] + return response; + } + + /** + * Example of getting a paginated list of rows in a table. + */ + // [TARGET list(TableDataListOption...)] + public Page> list() { + // [START list] + Page> page = table.list(TableDataListOption.pageSize(100)); + // do something with page + // [END list] + return page; + } + + /** + * Example of copying a table to a destination table and dataset referenced by name. + */ + // [TARGET copy(String, String, JobOption...)] + // [VARIABLE "my_dataset"] + // [VARIABLE "my_destination_table"] + public Job copy(String datasetName, String tableName) { + // [START copy] + Job job = table.copy(datasetName, tableName); + + // Wait for the job to complete. + try { + Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), + WaitForOption.timeout(60, TimeUnit.SECONDS)); + if (completedJob != null && completedJob.status().error() == null) { + // Job completed successfully. + } else { + // Handle error case. + } + } catch (InterruptedException | TimeoutException e) { + // Handle interrupted wait. + } + + // [END copy] + return job; + } + + /** + * Example copying a table to a destination table referenced by table ID. 
+ */ + // [TARGET copy(TableId, JobOption...)] + // [VARIABLE "my_dataset"] + // [VARIABLE "copy_destination"] + public Job copyTableId(String dataset, String tableName) throws BigQueryException { + // [START copyTableId] + TableId destinationId = TableId.of(dataset, tableName); + JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL); + + Job job = table.copy(destinationId, options); + + // Wait for the job to complete. + try { + Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), + WaitForOption.timeout(60, TimeUnit.SECONDS)); + if (completedJob != null && completedJob.status().error() == null) { + // Job completed successfully. + } else { + // Handle error case. + } + } catch (InterruptedException | TimeoutException e) { + // Handle interrupted wait. + } + // [END copyTableId] + return job; + } + + /** + * Example extracting data to a list of Google Cloud Storage files. + */ + // [TARGET extract(String, List, JobOption...)] + // [VARIABLE "CSV"] + // [VARIABLE "gs://myapp.appspot.com/PartitionA_*.csv"] + // [VARIABLE "gs://myapp.appspot.com/PartitionB_*.csv"] + public Job extractList(String format, String gcsUrl1, String gcsUrl2) { + // [START extractList] + List destinationUris = new ArrayList<>(); + destinationUris.add(gcsUrl1); + destinationUris.add(gcsUrl2); + + Job job = table.extract(format, destinationUris); + + // Wait for the job to complete. + try { + Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), + WaitForOption.timeout(60, TimeUnit.SECONDS)); + if (completedJob != null && completedJob.status().error() == null) { + // Job completed successfully. + } else { + // Handle error case. + } + } catch (InterruptedException | TimeoutException e) { + // Handle interrupted wait. + } + // [END extractList] + return job; + } + + /** + * Example extracting data to single Google Cloud Storage file. 
+ */ + // [TARGET extract(String, String, JobOption...)] + // [VARIABLE "CSV"] + // [VARIABLE "gs://myapp.appspot.com/filename.csv"] + public Job extractSingle(String format, String gcsUrl) { + // [START extractSingle] + Job job = table.extract(format, gcsUrl); + + // Wait for the job to complete. + try { + Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), + WaitForOption.timeout(60, TimeUnit.SECONDS)); + if (completedJob != null && completedJob.status().error() == null) { + // Job completed successfully. + } else { + // Handle error case. + } + } catch (InterruptedException | TimeoutException e) { + // Handle interrupted wait. + } + // [END extractSingle] + return job; + } + + /** + * Example loading data from a list of Google Cloud Storage files. + */ + // [TARGET load(FormatOptions, List, JobOption...)] + // [VARIABLE "gs://myapp.appspot.com/PartitionA_000000000000.csv"] + // [VARIABLE "gs://myapp.appspot.com/PartitionB_000000000000.csv"] + public Job loadList(String gcsUrl1, String gcsUrl2) { + // [START loadList] + List sourceUris = new ArrayList<>(); + sourceUris.add(gcsUrl1); + sourceUris.add(gcsUrl2); + + Job job = table.load(FormatOptions.csv(), sourceUris); + + // Wait for the job to complete. + try { + Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), + WaitForOption.timeout(60, TimeUnit.SECONDS)); + if (completedJob != null && completedJob.status().error() == null) { + // Job completed successfully. + } else { + // Handle error case. + } + } catch (InterruptedException | TimeoutException e) { + // Handle interrupted wait. + } + // [END loadList] + return job; + } + + /** + * Example loading data from a single Google Cloud Storage file. 
+ */ + // [TARGET load(FormatOptions, String, JobOption...)] + // [VARIABLE "gs://myapp.appspot.com/filename.csv"] + public Job loadSingle(String sourceUri) { + // [START loadSingle] + Job job = table.load(FormatOptions.csv(), sourceUri); + + // Wait for the job to complete. + try { + Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), + WaitForOption.timeout(60, TimeUnit.SECONDS)); + if (completedJob != null && completedJob.status().error() == null) { + // Job completed successfully. + } else { + // Handle error case. + } + } catch (InterruptedException | TimeoutException e) { + // Handle interrupted wait. + } + // [END loadSingle] + return job; + } +} diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java new file mode 100644 index 000000000000..e2b313112820 --- /dev/null +++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java @@ -0,0 +1,325 @@ +/* + * Copyright 2016 Google Inc. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.examples.bigquery.snippets; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.List; +import java.util.Set; +import java.util.logging.Logger; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.google.cloud.Page; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; +import com.google.cloud.bigquery.BigQuery.TableDataListOption; +import com.google.cloud.bigquery.BigQuery.TableField; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Type; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.InsertAllResponse; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import com.google.common.base.Function; +import com.google.common.base.Objects; +import com.google.common.collect.FluentIterable; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; + +/** + * Integration tests for {@link TableSnippets}. 
+ */ +public class ITTableSnippets { + private static final String BASE_TABLE_NAME = "my_table"; + private static final String DATASET_NAME = "my_dataset"; + private static final String COPY_DATASET_NAME = "my_copy_dataset"; + private static final Value ROW1 = new Value("value1", true); + private static final Value ROW2 = new Value("value2", false); + private static final Logger log = Logger.getLogger(ITTableSnippets.class.getName()); + + private static BigQuery bigquery; + private Table table; + private TableSnippets tableSnippets; + + private static final String DOOMED_TABLE_NAME = "doomed_table"; + private static final String DOOMED_DATASET_NAME = "doomed_dataset"; + public static final TableId DOOMED_TABLE_ID = TableId.of(DOOMED_DATASET_NAME, DOOMED_TABLE_NAME); + + private static Table doomedTable; + private static TableSnippets doomedTableSnippets; + + private static int nextTableNumber; + + @BeforeClass + public static void beforeClass() { + bigquery = BigQueryOptions.defaultInstance().service(); + bigquery.create(DatasetInfo.builder(DATASET_NAME).build()); + bigquery.create(DatasetInfo.builder(COPY_DATASET_NAME).build()); + bigquery.create(DatasetInfo.builder(DOOMED_DATASET_NAME).build()); + } + + @Before + public void before() { + ++nextTableNumber; + StandardTableDefinition.Builder builder = StandardTableDefinition.builder(); + builder.schema( + Schema.of(Field.of("stringField", Type.string()), Field.of("booleanField", Type.bool()))); + table = bigquery.create(TableInfo.of(getTableId(), builder.build())); + bigquery.create(TableInfo.of(getCopyTableId(), builder.build())); + tableSnippets = new TableSnippets(table); + + doomedTable = bigquery.create(TableInfo.of(DOOMED_TABLE_ID, builder.build())); + doomedTableSnippets = new TableSnippets(doomedTable); + } + + @After + public void after() { + bigquery.delete(getTableId()); + bigquery.delete(getCopyTableId()); + bigquery.delete(DOOMED_TABLE_ID); + } + + @AfterClass + public static void afterClass() { + 
bigquery.delete(DATASET_NAME, DatasetDeleteOption.deleteContents()); + bigquery.delete(COPY_DATASET_NAME, DatasetDeleteOption.deleteContents()); + bigquery.delete(DOOMED_DATASET_NAME, DatasetDeleteOption.deleteContents()); + } + + private String getTableName() { + return BASE_TABLE_NAME + nextTableNumber; + } + + private TableId getTableId() { + return TableId.of(DATASET_NAME, getTableName()); + } + + private String getCopyTableName() { + return BASE_TABLE_NAME + "_copy_" + nextTableNumber; + } + + private TableId getCopyTableId() { + return TableId.of(COPY_DATASET_NAME, getCopyTableName()); + } + + @Test + public void testCheckExists() { + log.info("testCheckExists"); + tableSnippets.checkExists(); + } + + @Test + public void testReloadTableWithFields() { + log.info("testReloadTableWithFields"); + tableSnippets.reloadTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS); + } + + @Test + public void testUpdateTableWithFields() { + log.info("testUpdateTableWithFields"); + tableSnippets.updateTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS); + } + + @Test + public void testDelete() { + log.info("testDelete"); + doomedTableSnippets.delete(); + } + + @Test + public void testInsert() { + log.info("testInsert"); + InsertAllResponse response = tableSnippets.insert("row1", "row2"); + assertFalse(response.hasErrors()); + verifyTestRows(table); + } + + @Test + public void testInsertParams() throws InterruptedException { + InsertAllResponse response = tableSnippets.insertWithParams("row1", "row2"); + assertTrue(response.hasErrors()); + List> rows = ImmutableList.copyOf(tableSnippets.list().values()); + while (rows.isEmpty()) { + Thread.sleep(500); + rows = ImmutableList.copyOf(tableSnippets.list().values()); + } + Set values = + FluentIterable.from(rows).transform(new Function, Value>() { + @Override + public Value apply(List row) { + return new Value(row.get(0).stringValue(), row.get(1).booleanValue()); + } + }).toSet(); + 
assertEquals(ImmutableSet.of(ROW2), values); + } + + @Test + public void testList() throws InterruptedException { + List> rows = ImmutableList.copyOf(tableSnippets.list().values()); + assertEquals(0, rows.size()); + + InsertAllResponse response = tableSnippets.insert("row1", "row2"); + assertFalse(response.hasErrors()); + rows = ImmutableList.copyOf(tableSnippets.list().values()); + while (rows.isEmpty()) { + Thread.sleep(500); + rows = ImmutableList.copyOf(tableSnippets.list().values()); + } + assertEquals(2, rows.size()); + } + + @Test + public void testCopy() { + tableSnippets.copy(COPY_DATASET_NAME, BASE_TABLE_NAME); + } + + @Test + public void testCopyTableId() { + log.info("testCopyTableId"); + tableSnippets.copyTableId(COPY_DATASET_NAME, getCopyTableName()); + } + + @Test + public void testExtractList() { + log.info("testExtractList"); + String projectId = bigquery.options().projectId(); + String gcsFile1 = "gs://" + projectId + ".appspot.com/extractTestA_*.csv"; + String gcsFile2 = "gs://" + projectId + ".appspot.com/extractTestB_*.csv"; + tableSnippets.extractList("CSV", gcsFile1, gcsFile2); + } + + @Test + public void testExtractSingle() { + log.info("testExtractSingle"); + String projectId = bigquery.options().projectId(); + String gcsFile = "gs://" + projectId + ".appspot.com/extractTest.csv"; + tableSnippets.extractSingle("CSV", gcsFile); + } + + @Test + public void testLoadList() { + log.info("testLoadList"); + String projectId = bigquery.options().projectId(); + String gcsFile1 = "gs://" + projectId + ".appspot.com/loadTest1.csv"; + String gcsFile2 = "gs://" + projectId + ".appspot.com/loadTest2.csv"; + + // Before we can load, we should make sure those files exist. 
+ tableSnippets.extractSingle("CSV", gcsFile1); + tableSnippets.extractSingle("CSV", gcsFile2); + + tableSnippets.loadList(gcsFile1, gcsFile2); + } + + @Test + public void testLoadSingle() { + log.info("testLoadSingle"); + String projectId = bigquery.options().projectId(); + String gcsFile = "gs://" + projectId + ".appspot.com/loadSingle.csv"; + + // Before we can load, we should make sure the file exists. + tableSnippets.extractSingle("CSV", gcsFile); + + tableSnippets.loadSingle(gcsFile); + } + + private static class Value { + final String stringField; + final boolean booleanField; + + Value(String stringField, boolean booleanField) { + this.stringField = stringField; + this.booleanField = booleanField; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof Value) { + Value o = (Value) obj; + return Objects.equal(stringField, o.stringField) && booleanField == o.booleanField; + } + return false; + } + + @Override + public int hashCode() { + return Objects.hashCode(stringField, booleanField); + } + + @Override + public String toString() { + return ""; + } + } + + /** + * Verifies that the given table has the rows inserted by InsertTestRows(). + * + * @param checkTable The table to query. + */ + private void verifyTestRows(Table checkTable) { + List> rows = waitForTableRows(checkTable, 2); + // Verify that the table data matches what it's supposed to. + Set values = + FluentIterable.from(rows).transform(new Function, Value>() { + @Override + public Value apply(List row) { + return new Value(row.get(0).stringValue(), row.get(1).booleanValue()); + } + }).toSet(); + assertEquals(ImmutableSet.of(ROW2, ROW1), values); + } + + /** + * Waits for a specified number of rows to appear in the given table. This is used by + * verifyTestRows to wait for data to appear before verifying. + * + * @param checkTable + * @param numRows + * @return The rows from the table. 
+ */ + private List> waitForTableRows(Table checkTable, int numRows) { + // Wait for the data to appear. + Page> page = checkTable.list(TableDataListOption.pageSize(100)); + List> rows = ImmutableList.copyOf(page.values()); + int numSleeps = 0; + while (rows.size() != numRows) { + assertTrue(numSleeps < 10); + log.info("Sleeping and waiting for " + numRows + " test rows to appear (currently " + + rows.size() + ")..."); + try { + ++numSleeps; + Thread.sleep(5000); + } catch (InterruptedException e) { + } + page = checkTable.list(TableDataListOption.pageSize(100)); + rows = ImmutableList.copyOf(page.values()); + } + return rows; + } +} From 2d1fc9251856c9e9f777ff6e0a7f9494083cae0f Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Tue, 20 Sep 2016 18:07:03 +0200 Subject: [PATCH 2/2] Minor consistency fixes to Table snippets, add snippets to javadoc --- .../java/com/google/cloud/bigquery/Table.java | 126 +++++----- .../bigquery/snippets/TableSnippets.java | 175 +++++++------ .../bigquery/snippets/ITTableSnippets.java | 232 +++++++----------- 3 files changed, 251 insertions(+), 282 deletions(-) diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java index b64fd025f2ad..26f79be6f754 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java @@ -139,10 +139,13 @@ public Table build() { /** * Checks if this table exists. * - *

Example of ensuring that a table exists. + *

Example of checking if the table exists. *

 {@code
-   * if (!table.exists()) {
-   *   throw new RuntimeException("Table does not exist.");
+   * boolean exists = table.exists();
+   * if (exists) {
+   *   // the table exists
+   * } else {
+   *   // the table was not found
    * }
    * }
* @@ -156,11 +159,15 @@ public boolean exists() { /** * Fetches current table's latest information. Returns {@code null} if the table does not exist. * - *

Example of fetching a table's latest information, specifying particular table field options. + *

Example of fetching the table's latest information, specifying particular table fields to + * get. *

 {@code
    * TableField field1 = TableField.LAST_MODIFIED_TIME;
    * TableField field2 = TableField.NUM_ROWS;
-   * Table reloaded = table.reload(TableOption.fields(field1, field2));
+   * Table latestTable = table.reload(TableOption.fields(field1, field2));
+   * if (latestTable == null) {
+   *   // the table was not found
+   * }
    * }
* * @param options table options @@ -175,11 +182,9 @@ public Table reload(TableOption... options) { * Updates the table's information with this table's information. Dataset's and table's * user-defined ids cannot be changed. A new {@code Table} object is returned. * - *

Example of updating a table's information, specifying particular table field options. + *

Example of updating the table's information. *

 {@code
-   * TableField field1 = TableField.LAST_MODIFIED_TIME;
-   * TableField field2 = TableField.NUM_ROWS;
-   * Table updated = table.update(TableOption.fields(field1, field2));
+   * Table updatedTable = table.toBuilder().description("new description").build().update();
    * }
* * @param options dataset options @@ -193,9 +198,14 @@ public Table update(TableOption... options) { /** * Deletes this table. * - *

Example of deleting a table. + *

Example of deleting the table. *

 {@code
-   * table.delete();
+   * boolean deleted = table.delete();
+   * if (deleted) {
+   *   // the table was deleted
+   * } else {
+   *   // the table was not found
+   * }
    * }
* * @return {@code true} if table was deleted, {@code false} if it was not found @@ -208,7 +218,7 @@ public boolean delete() { /** * Insert rows into the table. * - *

Example of inserting rows into a table. + *

Example of inserting rows into the table. *

 {@code
    * String rowId1 = "rowId1";
    * String rowId2 = "rowId2";
@@ -236,7 +246,7 @@ public InsertAllResponse insert(Iterable rows)
   /**
    * Insert rows into the table.
    *
-   * 

Example of inserting rows into a table which ignores invalid rows. + *

Example of inserting rows into the table, ignoring invalid rows. *

 {@code
    * String rowId1 = "rowId1";
    * String rowId2 = "rowId2";
@@ -273,10 +283,14 @@ public InsertAllResponse insert(Iterable rows,
   /**
    * Returns the paginated list rows in this table.
    *
-   * 

Example of getting a paginated list of rows in a table. + *

Example of listing rows in the table. *

 {@code
    * Page> page = table.list(TableDataListOption.pageSize(100));
-   * // do something with page
+   * Iterator> rowIterator = page.iterateAll();
+   * while (rowIterator.hasNext()) {
+   *   List row = rowIterator.next();
+   *   // do something with the row
+   * }
    * }
* * @param options table data list options @@ -291,25 +305,23 @@ public Page> list(TableDataListOption... options) * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the * started {@link Job} object. * - *

Example of copying a table to a destination table and dataset referenced by name. + *

Example of copying the table to a destination table. *

 {@code
    * String datasetName = "my_dataset";
    * String tableName = "my_destination_table";
    * Job job = table.copy(datasetName, tableName);
-   *
    * // Wait for the job to complete.
    * try {
    *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
-   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *       WaitForOption.timeout(3, TimeUnit.MINUTES));
    *   if (completedJob != null && completedJob.status().error() == null) {
-   *     // Job completed successfully.
+   *     // Job completed successfully
    *   } else {
-   *     // Handle error case.
+   *     // Handle error case
    *   }
    * } catch (InterruptedException | TimeoutException e) {
-   *   // Handle interrupted wait.
+   *   // Handle interrupted wait
    * }
-   *
    * }
* * @param destinationDataset the user-defined id of the destination dataset @@ -326,26 +338,24 @@ public Job copy(String destinationDataset, String destinationTable, JobOption... * Starts a BigQuery Job to copy the current table to the provided destination table. Returns the * started {@link Job} object. * - *

Example copying a table to a destination table referenced by table ID. + *

Example copying the table to a destination table. *

 {@code
    * String dataset = "my_dataset";
-   * String tableName = "copy_destination";
+   * String tableName = "my_destination_table";
    * TableId destinationId = TableId.of(dataset, tableName);
    * JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
-   *
    * Job job = table.copy(destinationId, options);
-   *
    * // Wait for the job to complete.
    * try {
    *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
-   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *       WaitForOption.timeout(3, TimeUnit.MINUTES));
    *   if (completedJob != null && completedJob.status().error() == null) {
    *     // Job completed successfully.
    *   } else {
    *     // Handle error case.
    *   }
    * } catch (InterruptedException | TimeoutException e) {
-   *   // Handle interrupted wait.
+   *   // Handle interrupted wait
    * }
    * }
* @@ -366,20 +376,19 @@ public Job copy(TableId destinationTable, JobOption... options) *

Example extracting data to single Google Cloud Storage file. *

 {@code
    * String format = "CSV";
-   * String gcsUrl = "gs://myapp.appspot.com/filename.csv";
+   * String gcsUrl = "gs://my_bucket/filename.csv";
    * Job job = table.extract(format, gcsUrl);
-   *
-   * // Wait for the job to complete.
+   * // Wait for the job to complete
    * try {
    *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
-   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *       WaitForOption.timeout(3, TimeUnit.MINUTES));
    *   if (completedJob != null && completedJob.status().error() == null) {
-   *     // Job completed successfully.
+   *     // Job completed successfully
    *   } else {
-   *     // Handle error case.
+   *     // Handle error case
    *   }
    * } catch (InterruptedException | TimeoutException e) {
-   *   // Handle interrupted wait.
+   *   // Handle interrupted wait
    * }
    * }
* @@ -398,28 +407,26 @@ public Job extract(String format, String destinationUri, JobOption... options) * Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns * the started {@link Job} object. * - *

Example extracting data to a list of Google Cloud Storage files. + *

Example of partitioning data to a list of Google Cloud Storage files. *

 {@code
    * String format = "CSV";
-   * String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_*.csv";
-   * String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_*.csv";
+   * String gcsUrl1 = "gs://my_bucket/PartitionA_*.csv";
+   * String gcsUrl2 = "gs://my_bucket/PartitionB_*.csv";
    * List destinationUris = new ArrayList<>();
    * destinationUris.add(gcsUrl1);
    * destinationUris.add(gcsUrl2);
-   *
    * Job job = table.extract(format, destinationUris);
-   *
-   * // Wait for the job to complete.
+   * // Wait for the job to complete
    * try {
    *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
-   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *       WaitForOption.timeout(3, TimeUnit.MINUTES));
    *   if (completedJob != null && completedJob.status().error() == null) {
-   *     // Job completed successfully.
+   *     // Job completed successfully
    *   } else {
-   *     // Handle error case.
+   *     // Handle error case
    *   }
    * } catch (InterruptedException | TimeoutException e) {
-   *   // Handle interrupted wait.
+   *   // Handle interrupted wait
    * }
    * }
* @@ -442,20 +449,19 @@ public Job extract(String format, List destinationUris, JobOption... opt * *

Example loading data from a single Google Cloud Storage file. *

 {@code
-   * String sourceUri = "gs://myapp.appspot.com/filename.csv";
+   * String sourceUri = "gs://my_bucket/filename.csv";
    * Job job = table.load(FormatOptions.csv(), sourceUri);
-   *
-   * // Wait for the job to complete.
+   * // Wait for the job to complete
    * try {
    *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
-   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *       WaitForOption.timeout(3, TimeUnit.MINUTES));
    *   if (completedJob != null && completedJob.status().error() == null) {
-   *     // Job completed successfully.
+   *     // Job completed successfully
    *   } else {
-   *     // Handle error case.
+   *     // Handle error case
    *   }
    * } catch (InterruptedException | TimeoutException e) {
-   *   // Handle interrupted wait.
+   *   // Handle interrupted wait
    * }
    * }
* @@ -476,25 +482,23 @@ public Job load(FormatOptions format, String sourceUri, JobOption... options) * *

Example loading data from a list of Google Cloud Storage files. *

 {@code
-   * String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_000000000000.csv";
-   * String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_000000000000.csv";
+   * String gcsUrl1 = "gs://my_bucket/filename1.csv";
+   * String gcsUrl2 = "gs://my_bucket/filename2.csv";
    * List sourceUris = new ArrayList<>();
    * sourceUris.add(gcsUrl1);
    * sourceUris.add(gcsUrl2);
-   *
    * Job job = table.load(FormatOptions.csv(), sourceUris);
-   *
-   * // Wait for the job to complete.
+   * // Wait for the job to complete
    * try {
    *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
-   *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+   *       WaitForOption.timeout(3, TimeUnit.MINUTES));
    *   if (completedJob != null && completedJob.status().error() == null) {
-   *     // Job completed successfully.
+   *     // Job completed successfully
    *   } else {
-   *     // Handle error case.
+   *     // Handle error case
    *   }
    * } catch (InterruptedException | TimeoutException e) {
-   *   // Handle interrupted wait.
+   *   // Handle interrupted wait
    * }
    * }
* diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java index 8f9e0d46bc91..c169f6badf58 100644 --- a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java +++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java @@ -14,14 +14,13 @@ * limitations under the License. */ -package com.google.cloud.examples.bigquery.snippets; +/* + * EDITING INSTRUCTIONS + * This file is referenced in Table’s javadoc. Any change to this file should be reflected in + * Table’s javadoc. + */ -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; +package com.google.cloud.examples.bigquery.snippets; import com.google.cloud.Page; import com.google.cloud.WaitForOption; @@ -39,12 +38,20 @@ import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.TableId; -/* - * EDITING INSTRUCTIONS - * This file is referenced in Table’s javadoc. Any change to this file should be reflected in - * Table’s javadoc. +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + + +/** + * This class contains a number of snippets for the {@link Table} class. */ public class TableSnippets { + private final Table table; public TableSnippets(Table table) { @@ -52,55 +59,67 @@ public TableSnippets(Table table) { } /** - * Example of ensuring that a table exists. + * Example of checking if the table exists. 
*/ // [TARGET exists()] - public void checkExists() { - // [START checkExists] - if (!table.exists()) { - throw new IllegalArgumentException("Table does not exist."); + public boolean exists() { + // [START exists] + boolean exists = table.exists(); + if (exists) { + // the table exists + } else { + // the table was not found } - // [END checkExists] + // [END exists] + return exists; } /** - * Example of fetching a table's latest information, specifying particular table field options. + * Example of fetching the table's latest information, specifying particular table fields to + * get. */ // [TARGET reload(TableOption...)] // [VARIABLE TableField.LAST_MODIFIED_TIME] // [VARIABLE TableField.NUM_ROWS] public Table reloadTableWithFields(TableField field1, TableField field2) { // [START reloadTableWithFields] - Table reloaded = table.reload(TableOption.fields(field1, field2)); + Table latestTable = table.reload(TableOption.fields(field1, field2)); + if (latestTable == null) { + // the table was not found + } // [END reloadTableWithFields] - return reloaded; + return latestTable; } /** - * Example of updating a table's information, specifying particular table field options. + * Example of updating the table's information. */ // [TARGET update(TableOption...)] - // [VARIABLE TableField.LAST_MODIFIED_TIME] - // [VARIABLE TableField.NUM_ROWS] - public Table updateTableWithFields(TableField field1, TableField field2) { - // [START updateTableWithFields] - Table updated = table.update(TableOption.fields(field1, field2)); - // [END updateTableWithFields] - return updated; + public Table update() { + // [START update] + Table updatedTable = table.toBuilder().description("new description").build().update(); + // [END update] + return updatedTable; } /** - * Example of deleting a table. + * Example of deleting the table. 
*/ // [TARGET delete()] - public void delete() { + public boolean delete() { // [START delete] - table.delete(); + boolean deleted = table.delete(); + if (deleted) { + // the table was deleted + } else { + // the table was not found + } // [END delete] + return deleted; } /** - * Example of inserting rows into a table. + * Example of inserting rows into the table. */ // [TARGET insert(Iterable)] // [VARIABLE "rowId1"] @@ -123,7 +142,7 @@ public InsertAllResponse insert(String rowId1, String rowId2) { } /** - * Example of inserting rows into a table which ignores invalid rows. + * Example of inserting rows into the table, ignoring invalid rows. */ // [TARGET insert(Iterable, boolean, boolean)] // [VARIABLE "rowId1"] @@ -146,19 +165,23 @@ public InsertAllResponse insertWithParams(String rowId1, String rowId2) { } /** - * Example of getting a paginated list of rows in a table. + * Example of listing rows in the table. */ // [TARGET list(TableDataListOption...)] public Page> list() { // [START list] Page> page = table.list(TableDataListOption.pageSize(100)); - // do something with page + Iterator> rowIterator = page.iterateAll(); + while (rowIterator.hasNext()) { + List row = rowIterator.next(); + // do something with the row + } // [END list] return page; } /** - * Example of copying a table to a destination table and dataset referenced by name. + * Example of copying the table to a destination table. */ // [TARGET copy(String, String, JobOption...)] // [VARIABLE "my_dataset"] @@ -166,79 +189,73 @@ public Page> list() { public Job copy(String datasetName, String tableName) { // [START copy] Job job = table.copy(datasetName, tableName); - // Wait for the job to complete. try { Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), - WaitForOption.timeout(60, TimeUnit.SECONDS)); + WaitForOption.timeout(3, TimeUnit.MINUTES)); if (completedJob != null && completedJob.status().error() == null) { - // Job completed successfully. 
+ // Job completed successfully } else { - // Handle error case. + // Handle error case } } catch (InterruptedException | TimeoutException e) { - // Handle interrupted wait. + // Handle interrupted wait } - // [END copy] return job; } /** - * Example copying a table to a destination table referenced by table ID. + * Example copying the table to a destination table. */ // [TARGET copy(TableId, JobOption...)] // [VARIABLE "my_dataset"] - // [VARIABLE "copy_destination"] + // [VARIABLE "my_destination_table"] public Job copyTableId(String dataset, String tableName) throws BigQueryException { // [START copyTableId] TableId destinationId = TableId.of(dataset, tableName); JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL); - Job job = table.copy(destinationId, options); - // Wait for the job to complete. try { Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), - WaitForOption.timeout(60, TimeUnit.SECONDS)); + WaitForOption.timeout(3, TimeUnit.MINUTES)); if (completedJob != null && completedJob.status().error() == null) { // Job completed successfully. } else { // Handle error case. } } catch (InterruptedException | TimeoutException e) { - // Handle interrupted wait. + // Handle interrupted wait } // [END copyTableId] return job; } /** - * Example extracting data to a list of Google Cloud Storage files. + * Example of partitioning data to a list of Google Cloud Storage files. 
*/ // [TARGET extract(String, List, JobOption...)] // [VARIABLE "CSV"] - // [VARIABLE "gs://myapp.appspot.com/PartitionA_*.csv"] - // [VARIABLE "gs://myapp.appspot.com/PartitionB_*.csv"] + // [VARIABLE "gs://my_bucket/PartitionA_*.csv"] + // [VARIABLE "gs://my_bucket/PartitionB_*.csv"] public Job extractList(String format, String gcsUrl1, String gcsUrl2) { // [START extractList] List destinationUris = new ArrayList<>(); destinationUris.add(gcsUrl1); destinationUris.add(gcsUrl2); - Job job = table.extract(format, destinationUris); - - // Wait for the job to complete. + // Wait for the job to complete try { Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), - WaitForOption.timeout(60, TimeUnit.SECONDS)); + WaitForOption.timeout(3, TimeUnit.MINUTES)); if (completedJob != null && completedJob.status().error() == null) { - // Job completed successfully. + // Job completed successfully } else { - // Handle error case. + // Handle error case } } catch (InterruptedException | TimeoutException e) { - // Handle interrupted wait. + // Handle interrupted wait } // [END extractList] return job; @@ -249,22 +266,21 @@ public Job extractList(String format, String gcsUrl1, String gcsUrl2) { */ // [TARGET extract(String, String, JobOption...)] // [VARIABLE "CSV"] - // [VARIABLE "gs://myapp.appspot.com/filename.csv"] + // [VARIABLE "gs://my_bucket/filename.csv"] public Job extractSingle(String format, String gcsUrl) { // [START extractSingle] Job job = table.extract(format, gcsUrl); - - // Wait for the job to complete. + // Wait for the job to complete try { Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), - WaitForOption.timeout(60, TimeUnit.SECONDS)); + WaitForOption.timeout(3, TimeUnit.MINUTES)); if (completedJob != null && completedJob.status().error() == null) { - // Job completed successfully. + // Job completed successfully } else { - // Handle error case. 
+ // Handle error case } } catch (InterruptedException | TimeoutException e) { - // Handle interrupted wait. + // Handle interrupted wait } // [END extractSingle] return job; @@ -274,27 +290,25 @@ public Job extractSingle(String format, String gcsUrl) { * Example loading data from a list of Google Cloud Storage files. */ // [TARGET load(FormatOptions, List, JobOption...)] - // [VARIABLE "gs://myapp.appspot.com/PartitionA_000000000000.csv"] - // [VARIABLE "gs://myapp.appspot.com/PartitionB_000000000000.csv"] + // [VARIABLE "gs://my_bucket/filename1.csv"] + // [VARIABLE "gs://my_bucket/filename2.csv"] public Job loadList(String gcsUrl1, String gcsUrl2) { // [START loadList] List sourceUris = new ArrayList<>(); sourceUris.add(gcsUrl1); sourceUris.add(gcsUrl2); - Job job = table.load(FormatOptions.csv(), sourceUris); - - // Wait for the job to complete. + // Wait for the job to complete try { Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), - WaitForOption.timeout(60, TimeUnit.SECONDS)); + WaitForOption.timeout(3, TimeUnit.MINUTES)); if (completedJob != null && completedJob.status().error() == null) { - // Job completed successfully. + // Job completed successfully } else { - // Handle error case. + // Handle error case } } catch (InterruptedException | TimeoutException e) { - // Handle interrupted wait. + // Handle interrupted wait } // [END loadList] return job; @@ -304,22 +318,21 @@ public Job loadList(String gcsUrl1, String gcsUrl2) { * Example loading data from a single Google Cloud Storage file. */ // [TARGET load(FormatOptions, String, JobOption...)] - // [VARIABLE "gs://myapp.appspot.com/filename.csv"] + // [VARIABLE "gs://my_bucket/filename.csv"] public Job loadSingle(String sourceUri) { // [START loadSingle] Job job = table.load(FormatOptions.csv(), sourceUri); - - // Wait for the job to complete. 
+ // Wait for the job to complete try { Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS), - WaitForOption.timeout(60, TimeUnit.SECONDS)); + WaitForOption.timeout(3, TimeUnit.MINUTES)); if (completedJob != null && completedJob.status().error() == null) { - // Job completed successfully. + // Job completed successfully } else { - // Handle error case. + // Handle error case } } catch (InterruptedException | TimeoutException e) { - // Handle interrupted wait. + // Handle interrupted wait } // [END loadSingle] return job; diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java index e2b313112820..6687c653d014 100644 --- a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java +++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java @@ -18,98 +18,101 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.util.List; -import java.util.Set; -import java.util.logging.Logger; - -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; - import com.google.cloud.Page; import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; import com.google.cloud.bigquery.BigQuery.TableDataListOption; import com.google.cloud.bigquery.BigQuery.TableField; -import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.DatasetInfo; import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.Field.Type; import com.google.cloud.bigquery.FieldValue; import 
com.google.cloud.bigquery.InsertAllResponse; +import com.google.cloud.bigquery.Job; import com.google.cloud.bigquery.Schema; import com.google.cloud.bigquery.StandardTableDefinition; import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.TableId; import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.cloud.storage.BucketInfo; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.testing.RemoteStorageHelper; import com.google.common.base.Function; -import com.google.common.base.Objects; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.Timeout; + +import java.util.List; +import java.util.Set; + /** * Integration tests for {@link TableSnippets}. 
*/ public class ITTableSnippets { + private static final String BASE_TABLE_NAME = "my_table"; - private static final String DATASET_NAME = "my_dataset"; - private static final String COPY_DATASET_NAME = "my_copy_dataset"; - private static final Value ROW1 = new Value("value1", true); - private static final Value ROW2 = new Value("value2", false); - private static final Logger log = Logger.getLogger(ITTableSnippets.class.getName()); + private static final String DATASET_NAME = RemoteBigQueryHelper.generateDatasetName(); + private static final String COPY_DATASET_NAME = RemoteBigQueryHelper.generateDatasetName(); + private static final String BUCKET_NAME = RemoteStorageHelper.generateBucketName(); + private static final Schema SCHEMA = + Schema.of(Field.of("stringField", Type.string()), Field.of("booleanField", Type.bool())); + private static final List ROW1 = ImmutableList.of("value1", true); + private static final List ROW2 = ImmutableList.of("value2", false); + private static final String DOOMED_TABLE_NAME = "doomed_table"; + private static final TableId DOOMED_TABLE_ID = TableId.of(DATASET_NAME, DOOMED_TABLE_NAME); private static BigQuery bigquery; + private static Storage storage; + private static int nextTableNumber; + private Table table; private TableSnippets tableSnippets; - private static final String DOOMED_TABLE_NAME = "doomed_table"; - private static final String DOOMED_DATASET_NAME = "doomed_dataset"; - public static final TableId DOOMED_TABLE_ID = TableId.of(DOOMED_DATASET_NAME, DOOMED_TABLE_NAME); - - private static Table doomedTable; - private static TableSnippets doomedTableSnippets; - - private static int nextTableNumber; + @Rule + public Timeout globalTimeout = Timeout.seconds(300); @BeforeClass public static void beforeClass() { - bigquery = BigQueryOptions.defaultInstance().service(); + bigquery = RemoteBigQueryHelper.create().options().service(); bigquery.create(DatasetInfo.builder(DATASET_NAME).build()); 
bigquery.create(DatasetInfo.builder(COPY_DATASET_NAME).build()); - bigquery.create(DatasetInfo.builder(DOOMED_DATASET_NAME).build()); + storage = RemoteStorageHelper.create().options().service(); + storage.create(BucketInfo.of(BUCKET_NAME)); } @Before public void before() { ++nextTableNumber; StandardTableDefinition.Builder builder = StandardTableDefinition.builder(); - builder.schema( - Schema.of(Field.of("stringField", Type.string()), Field.of("booleanField", Type.bool()))); + builder.schema(SCHEMA); table = bigquery.create(TableInfo.of(getTableId(), builder.build())); bigquery.create(TableInfo.of(getCopyTableId(), builder.build())); tableSnippets = new TableSnippets(table); - - doomedTable = bigquery.create(TableInfo.of(DOOMED_TABLE_ID, builder.build())); - doomedTableSnippets = new TableSnippets(doomedTable); } @After public void after() { bigquery.delete(getTableId()); bigquery.delete(getCopyTableId()); - bigquery.delete(DOOMED_TABLE_ID); } @AfterClass public static void afterClass() { - bigquery.delete(DATASET_NAME, DatasetDeleteOption.deleteContents()); - bigquery.delete(COPY_DATASET_NAME, DatasetDeleteOption.deleteContents()); - bigquery.delete(DOOMED_DATASET_NAME, DatasetDeleteOption.deleteContents()); + RemoteBigQueryHelper.forceDelete(bigquery, DATASET_NAME); + RemoteBigQueryHelper.forceDelete(bigquery, COPY_DATASET_NAME); + RemoteStorageHelper.forceDelete(storage, BUCKET_NAME); } private String getTableName() { @@ -129,32 +132,34 @@ private TableId getCopyTableId() { } @Test - public void testCheckExists() { - log.info("testCheckExists"); - tableSnippets.checkExists(); + public void testExists() { + assertTrue(tableSnippets.exists()); } @Test public void testReloadTableWithFields() { - log.info("testReloadTableWithFields"); - tableSnippets.reloadTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS); + Table latestTable = + tableSnippets.reloadTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS); + assertNotNull(latestTable); 
+ assertNotNull(latestTable.lastModifiedTime()); } @Test - public void testUpdateTableWithFields() { - log.info("testUpdateTableWithFields"); - tableSnippets.updateTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS); + public void testUpdate() { + Table updatedTable = tableSnippets.update(); + assertEquals("new description", updatedTable.description()); } @Test public void testDelete() { - log.info("testDelete"); - doomedTableSnippets.delete(); + Table doomedTable = + bigquery.create(TableInfo.of(DOOMED_TABLE_ID, StandardTableDefinition.of(SCHEMA))); + TableSnippets doomedTableSnippets = new TableSnippets(doomedTable); + assertTrue(doomedTableSnippets.delete()); } @Test - public void testInsert() { - log.info("testInsert"); + public void testInsert() throws InterruptedException { InsertAllResponse response = tableSnippets.insert("row1", "row2"); assertFalse(response.hasErrors()); verifyTestRows(table); @@ -169,11 +174,11 @@ public void testInsertParams() throws InterruptedException { Thread.sleep(500); rows = ImmutableList.copyOf(tableSnippets.list().values()); } - Set values = - FluentIterable.from(rows).transform(new Function, Value>() { + Set> values = + FluentIterable.from(rows).transform(new Function, List>() { @Override - public Value apply(List row) { - return new Value(row.get(0).stringValue(), row.get(1).booleanValue()); + public List apply(List row) { + return ImmutableList.of(row.get(0).stringValue(), row.get(1).booleanValue()); } }).toSet(); assertEquals(ImmutableSet.of(ROW2), values); @@ -201,95 +206,44 @@ public void testCopy() { @Test public void testCopyTableId() { - log.info("testCopyTableId"); - tableSnippets.copyTableId(COPY_DATASET_NAME, getCopyTableName()); - } - - @Test - public void testExtractList() { - log.info("testExtractList"); - String projectId = bigquery.options().projectId(); - String gcsFile1 = "gs://" + projectId + ".appspot.com/extractTestA_*.csv"; - String gcsFile2 = "gs://" + projectId + 
".appspot.com/extractTestB_*.csv"; - tableSnippets.extractList("CSV", gcsFile1, gcsFile2); - } - - @Test - public void testExtractSingle() { - log.info("testExtractSingle"); - String projectId = bigquery.options().projectId(); - String gcsFile = "gs://" + projectId + ".appspot.com/extractTest.csv"; - tableSnippets.extractSingle("CSV", gcsFile); + Job copyJob = tableSnippets.copyTableId(COPY_DATASET_NAME, getCopyTableName()); + assertSuccessful(copyJob); } @Test - public void testLoadList() { - log.info("testLoadList"); - String projectId = bigquery.options().projectId(); - String gcsFile1 = "gs://" + projectId + ".appspot.com/loadTest1.csv"; - String gcsFile2 = "gs://" + projectId + ".appspot.com/loadTest2.csv"; - - // Before we can load, we should make sure those files exist. - tableSnippets.extractSingle("CSV", gcsFile1); - tableSnippets.extractSingle("CSV", gcsFile2); - - tableSnippets.loadList(gcsFile1, gcsFile2); + public void testExtractAndLoadList() { + String gcsFile1 = "gs://" + BUCKET_NAME + "/extractTestA_*.csv"; + String gcsFile2 = "gs://" + BUCKET_NAME + "/extractTestB_*.csv"; + Job extractJob = tableSnippets.extractList("CSV", gcsFile1, gcsFile2); + gcsFile1 = gcsFile1.replace("*", "000000000000"); + gcsFile2 = gcsFile2.replace("*", "000000000000"); + assertSuccessful(extractJob); + Job loadJob = tableSnippets.loadList(gcsFile1, gcsFile2); + assertSuccessful(loadJob); } @Test - public void testLoadSingle() { - log.info("testLoadSingle"); - String projectId = bigquery.options().projectId(); - String gcsFile = "gs://" + projectId + ".appspot.com/loadSingle.csv"; - - // Before we can load, we should make sure the file exists. 
- tableSnippets.extractSingle("CSV", gcsFile); - - tableSnippets.loadSingle(gcsFile); - } - - private static class Value { - final String stringField; - final boolean booleanField; - - Value(String stringField, boolean booleanField) { - this.stringField = stringField; - this.booleanField = booleanField; - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof Value) { - Value o = (Value) obj; - return Objects.equal(stringField, o.stringField) && booleanField == o.booleanField; - } - return false; - } - - @Override - public int hashCode() { - return Objects.hashCode(stringField, booleanField); - } - - @Override - public String toString() { - return ""; - } + public void testExtractAndLoadSingle() { + String gcsFile = "gs://" + BUCKET_NAME + "/extractTest.csv"; + Job extractJob = tableSnippets.extractSingle("CSV", gcsFile); + assertSuccessful(extractJob); + Job loadJob = tableSnippets.loadSingle(gcsFile); + assertSuccessful(loadJob); } /** * Verifies that the given table has the rows inserted by InsertTestRows(). * - * @param checkTable The table to query. + * @param checkTable the table to query */ - private void verifyTestRows(Table checkTable) { + private void verifyTestRows(Table checkTable) throws InterruptedException { List> rows = waitForTableRows(checkTable, 2); // Verify that the table data matches what it's supposed to. - Set values = - FluentIterable.from(rows).transform(new Function, Value>() { + Set> values = + FluentIterable.from(rows).transform(new Function, List>() { @Override - public Value apply(List row) { - return new Value(row.get(0).stringValue(), row.get(1).booleanValue()); + public List apply(List row) { + return ImmutableList.of(row.get(0).stringValue(), row.get(1).booleanValue()); } }).toSet(); assertEquals(ImmutableSet.of(ROW2, ROW1), values); @@ -299,27 +253,25 @@ public Value apply(List row) { * Waits for a specified number of rows to appear in the given table. 
This is used by * verifyTestRows to wait for data to appear before verifying. * - * @param checkTable - * @param numRows - * @return The rows from the table. + * @param checkTable the table to query + * @param numRows the expected number of rows + * @return the rows from the table */ - private List> waitForTableRows(Table checkTable, int numRows) { + private List> waitForTableRows(Table checkTable, int numRows) + throws InterruptedException { // Wait for the data to appear. Page> page = checkTable.list(TableDataListOption.pageSize(100)); List> rows = ImmutableList.copyOf(page.values()); - int numSleeps = 0; while (rows.size() != numRows) { - assertTrue(numSleeps < 10); - log.info("Sleeping and waiting for " + numRows + " test rows to appear (currently " - + rows.size() + ")..."); - try { - ++numSleeps; - Thread.sleep(5000); - } catch (InterruptedException e) { - } + Thread.sleep(1000); page = checkTable.list(TableDataListOption.pageSize(100)); rows = ImmutableList.copyOf(page.values()); } return rows; } + + private void assertSuccessful(Job job) { + assertTrue(job.isDone()); + assertNull(job.status().error()); + } }