listDatasets(String projectId, DatasetListOption... options);
@@ -496,6 +572,17 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Deletes the requested dataset.
*
+   * <p>Example of deleting a dataset from its id, even if non-empty.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * Boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
+   * }
+   * }</pre>
+ *
* @return {@code true} if dataset was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -504,6 +591,19 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Deletes the requested dataset.
*
+   * <p>Example of deleting a dataset, even if non-empty.
+   *
+   * <pre> {@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
+   * }
+   * }</pre>
+ *
* @return {@code true} if dataset was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -512,6 +612,18 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Deletes the requested table.
*
+   * <p>Example of deleting a table.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Boolean deleted = bigquery.delete(datasetName, tableName);
+   * if (deleted) {
+   *   // the table was deleted
+   * } else {
+   *   // the table was not found
+   * }
+   * }</pre>
+ *
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -520,6 +632,20 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Deletes the requested table.
*
+   * <p>Example of deleting a table.
+   *
+   * <pre> {@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(projectId, datasetName, tableName);
+   * Boolean deleted = bigquery.delete(tableId);
+   * if (deleted) {
+   *   // the table was deleted
+   * } else {
+   *   // the table was not found
+   * }
+   * }</pre>
+ *
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -528,6 +654,15 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Updates dataset information.
*
+   * <p>Example of updating a dataset by changing its friendly name.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String newFriendlyName = "some_new_friendly_name";
+   * Dataset oldDataset = bigquery.getDataset(datasetName);
+   * DatasetInfo datasetInfo = oldDataset.toBuilder().friendlyName(newFriendlyName).build();
+   * Dataset newDataset = bigquery.update(datasetInfo);
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Dataset update(DatasetInfo dataset, DatasetOption... options);
@@ -535,6 +670,16 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Updates table information.
*
+   * <p>Example of updating a table by changing its friendly name.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String newFriendlyName = "new_friendly_name";
+   * Table oldTable = bigquery.getTable(datasetName, tableName);
+   * TableInfo tableInfo = oldTable.toBuilder().friendlyName(newFriendlyName).build();
+   * Table newTable = bigquery.update(tableInfo);
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Table update(TableInfo table, TableOption... options);
@@ -542,6 +687,13 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Returns the requested table or {@code null} if not found.
*
+   * <p>Example of getting a table.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Table table = bigquery.getTable(datasetName, tableName);
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Table getTable(String datasetId, String tableId, TableOption... options);
@@ -549,6 +701,15 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Returns the requested table or {@code null} if not found.
*
+   * <p>Example of getting a table.
+   *
+   * <pre> {@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(projectId, datasetName, tableName);
+   * Table table = bigquery.getTable(tableId);
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Table getTable(TableId tableId, TableOption... options);
@@ -560,6 +721,17 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
* {@link #getTable(TableId, TableOption...)} or
* {@link #getTable(String, String, TableOption...)}.
*
+   * <p>Example of listing the tables in a dataset, specifying the page size.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
+   * Iterator<Table> tableIterator = tables.iterateAll();
+   * while (tableIterator.hasNext()) {
+   *   Table table = tableIterator.next();
+   *   // do something with the table
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Page listTables(String datasetId, TableListOption... options);
@@ -571,6 +743,19 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
* {@link #getTable(TableId, TableOption...)} or
* {@link #getTable(String, String, TableOption...)}.
*
+   * <p>Example of listing the tables in a dataset.
+   *
+   * <pre> {@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100));
+   * Iterator<Table> tableIterator = tables.iterateAll();
+   * while (tableIterator.hasNext()) {
+   *   Table table = tableIterator.next();
+   *   // do something with the table
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Page listTables(DatasetId datasetId, TableListOption... options);
@@ -578,6 +763,28 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Sends an insert all request.
*
+   * <p>Example of inserting rows into a table without running a load job.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Values of the row to insert
+   * Map<String, Object> rowContent = new HashMap<>();
+   * rowContent.put("booleanField", true);
+   * // Bytes are passed in base64
+   * rowContent.put("bytesField", "DQ4KDQ==");
+   * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.builder(tableId)
+   *     .addRow("rowId", rowContent)
+   *     // More rows can be added in the same RPC by invoking .addRow() on the builder
+   *     .build());
+   * if (response.hasErrors()) {
+   *   // If any of the insertions failed, this lets you inspect the errors
+   *   for (Entry<Long, List<BigQueryError>> entry : response.insertErrors().entrySet()) {
+   *     // inspect row error
+   *   }
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
InsertAllResponse insertAll(InsertAllRequest request);
@@ -585,6 +792,19 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
/**
* Lists the table's rows.
*
+   * <p>Example of listing table rows, specifying the page size.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Page<List<FieldValue>> tableData =
+   *     bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
+   * Iterator<List<FieldValue>> rowIterator = tableData.iterateAll();
+   * while (rowIterator.hasNext()) {
+   *   List<FieldValue> row = rowIterator.next();
+   *   // do something with the row
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Page> listTableData(String datasetId, String tableId,
@@ -593,6 +813,20 @@ Page> listTableData(String datasetId, String tableId,
/**
* Lists the table's rows.
*
+   * <p>Example of listing table rows, specifying the page size.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableIdObject = TableId.of(datasetName, tableName);
+   * Page<List<FieldValue>> tableData =
+   *     bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+   * Iterator<List<FieldValue>> rowIterator = tableData.iterateAll();
+   * while (rowIterator.hasNext()) {
+   *   List<FieldValue> row = rowIterator.next();
+   *   // do something with the row
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Page> listTableData(TableId tableId, TableDataListOption... options);
@@ -600,6 +834,15 @@ Page> listTableData(String datasetId, String tableId,
/**
* Returns the requested job or {@code null} if not found.
*
+   * <p>Example of getting a job.
+   *
+   * <pre> {@code
+   * String jobName = "my_job_name";
+   * Job job = bigquery.getJob(jobName);
+   * if (job == null) {
+   *   // job was not found
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Job getJob(String jobId, JobOption... options);
@@ -607,6 +850,16 @@ Page> listTableData(String datasetId, String tableId,
/**
* Returns the requested job or {@code null} if not found.
*
+   * <p>Example of getting a job.
+   *
+   * <pre> {@code
+   * String jobName = "my_job_name";
+   * JobId jobIdObject = JobId.of(jobName);
+   * Job job = bigquery.getJob(jobIdObject);
+   * if (job == null) {
+   *   // job was not found
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Job getJob(JobId jobId, JobOption... options);
@@ -614,6 +867,16 @@ Page> listTableData(String datasetId, String tableId,
/**
* Lists the jobs.
*
+   * <p>Example of listing jobs, specifying the page size.
+   *
+   * <pre> {@code
+   * Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
+   * Iterator<Job> jobIterator = jobs.iterateAll();
+   * while (jobIterator.hasNext()) {
+   *   Job job = jobIterator.next();
+   *   // do something with the job
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
Page listJobs(JobListOption... options);
@@ -623,6 +886,17 @@ Page> listTableData(String datasetId, String tableId,
* checked using either {@link #getJob(JobId, JobOption...)} or
* {@link #getJob(String, JobOption...)}).
*
+   * <p>Example of cancelling a job.
+   *
+   * <pre> {@code
+   * String jobName = "my_job_name";
+   * boolean success = bigquery.cancel(jobName);
+   * if (success) {
+   *   // job was cancelled
+   * } else {
+   *   // job was not found
+   * }
+   * }</pre>
+ *
* @return {@code true} if cancel was requested successfully, {@code false} if the job was not
* found
* @throws BigQueryException upon failure
@@ -634,6 +908,18 @@ Page> listTableData(String datasetId, String tableId,
* checked using either {@link #getJob(JobId, JobOption...)} or
* {@link #getJob(String, JobOption...)}).
*
+   * <p>Example of cancelling a job.
+   *
+   * <pre> {@code
+   * String jobName = "my_job_name";
+   * JobId jobId = JobId.of(jobName);
+   * boolean success = bigquery.cancel(jobId);
+   * if (success) {
+   *   // job was cancelled
+   * } else {
+   *   // job was not found
+   * }
+   * }</pre>
+ *
* @return {@code true} if cancel was requested successfully, {@code false} if the job was not
* found
* @throws BigQueryException upon failure
@@ -643,6 +929,27 @@ Page> listTableData(String datasetId, String tableId,
/**
* Runs the query associated with the request.
*
+   * <p>Example of running a query.
+   *
+   * <pre> {@code
+   * String query = "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]";
+   * QueryRequest request = QueryRequest.of(query);
+   * QueryResponse response = bigquery.query(request);
+   * // Wait for things to finish
+   * while (!response.jobCompleted()) {
+   *   Thread.sleep(1000);
+   *   response = bigquery.getQueryResults(response.jobId());
+   * }
+   * if (response.hasErrors()) {
+   *   // handle errors
+   * }
+   * QueryResult result = response.result();
+   * Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+   * while (rowIterator.hasNext()) {
+   *   List<FieldValue> row = rowIterator.next();
+   *   // do something with the data
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
QueryResponse query(QueryRequest request);
@@ -650,6 +957,27 @@ Page> listTableData(String datasetId, String tableId,
/**
* Returns results of the query associated with the provided job.
*
+   * <p>Example of getting the results of query.
+   *
+   * <pre> {@code
+   * String query = "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]";
+   * QueryRequest request = QueryRequest.of(query);
+   * QueryResponse response = bigquery.query(request);
+   * // Wait for things to finish
+   * while (!response.jobCompleted()) {
+   *   Thread.sleep(1000);
+   *   response = bigquery.getQueryResults(response.jobId());
+   * }
+   * if (response.hasErrors()) {
+   *   // handle errors
+   * }
+   * QueryResult result = response.result();
+   * Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+   * while (rowIterator.hasNext()) {
+   *   List<FieldValue> row = rowIterator.next();
+   *   // do something with the data
+   * }
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
QueryResponse getQueryResults(JobId job, QueryResultsOption... options);
@@ -658,6 +986,25 @@ Page> listTableData(String datasetId, String tableId,
* Returns a channel to write data to be inserted into a BigQuery table. Data format and other
* options can be configured using the {@link WriteChannelConfiguration} parameter.
*
+   * <p>Example of creating a channel with which to write to a table.
+   *
+   * <pre> {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String csvData = "StringValue1\nStringValue2\n";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration =
+   *     WriteChannelConfiguration.builder(tableId).formatOptions(FormatOptions.csv()).build();
+   * BaseWriteChannel<BigQueryOptions, WriteChannelConfiguration> writer =
+   *     bigquery.writer(writeChannelConfiguration);
+   * // Write data to writer
+   * try {
+   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   * } catch (IOException e) {
+   *   // Unable to write data
+   * }
+   * writer.close();
+   * }</pre>
+ *
* @throws BigQueryException upon failure
*/
TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration);
diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java
new file mode 100644
index 000000000000..d15036aa9471
--- /dev/null
+++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java
@@ -0,0 +1,597 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * EDITING INSTRUCTIONS
+ * This file is referenced in BigQuery's javadoc. Any change to this file should be reflected in
+ * BigQuery's javadoc.
+ */
+
+package com.google.cloud.examples.bigquery.snippets;
+
+import com.google.api.client.util.Charsets;
+import com.google.cloud.BaseWriteChannel;
+import com.google.cloud.Page;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.BigQuery.DatasetListOption;
+import com.google.cloud.bigquery.BigQuery.JobListOption;
+import com.google.cloud.bigquery.BigQuery.TableDataListOption;
+import com.google.cloud.bigquery.BigQuery.TableListOption;
+import com.google.cloud.bigquery.BigQueryError;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.Dataset;
+import com.google.cloud.bigquery.DatasetId;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FormatOptions;
+import com.google.cloud.bigquery.InsertAllRequest;
+import com.google.cloud.bigquery.InsertAllResponse;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobConfiguration;
+import com.google.cloud.bigquery.JobId;
+import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryRequest;
+import com.google.cloud.bigquery.QueryResponse;
+import com.google.cloud.bigquery.QueryResult;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableDefinition;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.WriteChannelConfiguration;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+/**
+ * This class contains a number of snippets for the {@link BigQuery} interface.
+ */
+public class BigQuerySnippets {
+
+ private final BigQuery bigquery;
+
+ public BigQuerySnippets(BigQuery bigquery) {
+ this.bigquery = bigquery;
+ }
+
+ /**
+ * Example of creating a dataset.
+ */
+ // [TARGET create(DatasetInfo, DatasetOption...)]
+ // [VARIABLE "my_dataset_name"]
+ public Dataset createDataset(String datasetName) {
+ // [START createDataset]
+ Dataset dataset = null;
+ DatasetInfo datasetInfo = DatasetInfo.builder(datasetName).build();
+ try {
+ // the dataset was created
+ dataset = bigquery.create(datasetInfo);
+ } catch (BigQueryException e) {
+ // the dataset was not created
+ }
+ // [END createDataset]
+ return dataset;
+ }
+
+ /**
+ * Example of updating a dataset by changing its friendly name.
+ */
+ // [TARGET update(DatasetInfo, DatasetOption...)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "some_new_friendly_name"]
+ public Dataset updateDataset(String datasetName, String newFriendlyName) {
+ // [START updateDataset]
+ Dataset oldDataset = bigquery.getDataset(datasetName);
+ DatasetInfo datasetInfo = oldDataset.toBuilder().friendlyName(newFriendlyName).build();
+ Dataset newDataset = bigquery.update(datasetInfo);
+ // [END updateDataset]
+ return newDataset;
+ }
+
+ /**
+ * Example of updating a table by changing its friendly name.
+ */
+ // [TARGET update(TableInfo, TableOption...)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ // [VARIABLE "new_friendly_name"]
+ public Table updateTable(String datasetName, String tableName, String newFriendlyName) {
+ // [START updateTable]
+ Table oldTable = bigquery.getTable(datasetName, tableName);
+ TableInfo tableInfo = oldTable.toBuilder().friendlyName(newFriendlyName).build();
+ Table newTable = bigquery.update(tableInfo);
+ // [END updateTable]
+ return newTable;
+ }
+
+ /**
+ * Example of listing datasets, specifying the page size.
+ */
+ // [TARGET listDatasets(DatasetListOption...)]
+  public Page<Dataset> listDatasets() {
+    // [START listDatasets]
+    Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+    Iterator<Dataset> datasetIterator = datasets.iterateAll();
+    while (datasetIterator.hasNext()) {
+      Dataset dataset = datasetIterator.next();
+      // do something with the dataset
+    }
+    // [END listDatasets]
+    return datasets;
+  }
+
+ /**
+ * Example of listing datasets in a project, specifying the page size.
+ */
+ // [TARGET listDatasets(String, DatasetListOption...)]
+ // [VARIABLE "my_project_id"]
+  public Page<Dataset> listDatasets(String projectId) {
+    // [START listDatasets]
+    Page<Dataset> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
+    Iterator<Dataset> datasetIterator = datasets.iterateAll();
+    while (datasetIterator.hasNext()) {
+      Dataset dataset = datasetIterator.next();
+      // do something with the dataset
+    }
+    // [END listDatasets]
+    return datasets;
+  }
+
+ /**
+ * Example of deleting a dataset from its id, even if non-empty.
+ */
+ // [TARGET delete(String, DatasetDeleteOption...)]
+ // [VARIABLE "my_dataset_name"]
+ public Boolean deleteDataset(String datasetName) {
+ // [START deleteDataset]
+ Boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+ if (deleted) {
+ // the dataset was deleted
+ } else {
+ // the dataset was not found
+ }
+ // [END deleteDataset]
+ return deleted;
+ }
+
+ /**
+ * Example of deleting a dataset, even if non-empty.
+ */
+ // [TARGET delete(DatasetId, DatasetDeleteOption...)]
+ // [VARIABLE "my_project_id"]
+ // [VARIABLE "my_dataset_name"]
+ public Boolean deleteDatasetFromId(String projectId, String datasetName) {
+ // [START deleteDatasetFromId]
+ DatasetId datasetId = DatasetId.of(projectId, datasetName);
+ Boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+ if (deleted) {
+ // the dataset was deleted
+ } else {
+ // the dataset was not found
+ }
+ // [END deleteDatasetFromId]
+ return deleted;
+ }
+
+ /**
+ * Example of deleting a table.
+ */
+ // [TARGET delete(String, String)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ public Boolean deleteTable(String datasetName, String tableName) {
+ // [START deleteTable]
+ Boolean deleted = bigquery.delete(datasetName, tableName);
+ if (deleted) {
+ // the table was deleted
+ } else {
+ // the table was not found
+ }
+ // [END deleteTable]
+ return deleted;
+ }
+
+ /**
+ * Example of deleting a table.
+ */
+ // [TARGET delete(TableId)]
+ // [VARIABLE "my_project_id"]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ public Boolean deleteTableFromId(String projectId, String datasetName, String tableName) {
+ // [START deleteTableFromId]
+ TableId tableId = TableId.of(projectId, datasetName, tableName);
+ Boolean deleted = bigquery.delete(tableId);
+ if (deleted) {
+ // the table was deleted
+ } else {
+ // the table was not found
+ }
+ // [END deleteTableFromId]
+ return deleted;
+ }
+
+ /**
+ * Example of listing the tables in a dataset, specifying the page size.
+ */
+ // [TARGET listTables(String, TableListOption...)]
+ // [VARIABLE "my_dataset_name"]
+  public Page<Table> listTables(String datasetName) {
+    // [START listTables]
+    Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
+    Iterator<Table> tableIterator = tables.iterateAll();
+    while (tableIterator.hasNext()) {
+      Table table = tableIterator.next();
+      // do something with the table
+    }
+    // [END listTables]
+    return tables;
+  }
+
+
+ /**
+ * Example of listing the tables in a dataset.
+ */
+ // [TARGET listTables(DatasetId, TableListOption...)]
+ // [VARIABLE "my_project_id"]
+ // [VARIABLE "my_dataset_name"]
+  public Page<Table> listTablesFromId(String projectId, String datasetName) {
+    // [START listTablesFromId]
+    DatasetId datasetId = DatasetId.of(projectId, datasetName);
+    Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100));
+    Iterator<Table> tableIterator = tables.iterateAll();
+    while (tableIterator.hasNext()) {
+      Table table = tableIterator.next();
+      // do something with the table
+    }
+    // [END listTablesFromId]
+    return tables;
+  }
+
+ /**
+ * Example of getting a dataset.
+ */
+ // [TARGET getDataset(String, DatasetOption...)]
+ // [VARIABLE "my_dataset"]
+ public Dataset getDataset(String datasetName) {
+ // [START getDataset]
+ Dataset dataset = bigquery.getDataset(datasetName);
+ // [END getDataset]
+ return dataset;
+ }
+
+ /**
+ * Example of getting a dataset.
+ */
+ // [TARGET getDataset(DatasetId, DatasetOption...)]
+ // [VARIABLE "my_project_id"]
+ // [VARIABLE "my_dataset_name"]
+ public Dataset getDatasetFromId(String projectId, String datasetName) {
+ // [START getDatasetFromId]
+ DatasetId datasetId = DatasetId.of(projectId, datasetName);
+ Dataset dataset = bigquery.getDataset(datasetId);
+ // [END getDatasetFromId]
+ return dataset;
+ }
+
+ /**
+ * Example of getting a table.
+ */
+ // [TARGET getTable(String, String, TableOption...)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ public Table getTable(String datasetName, String tableName) {
+ // [START getTable]
+ Table table = bigquery.getTable(datasetName, tableName);
+ // [END getTable]
+ return table;
+ }
+
+ /**
+ * Example of getting a table.
+ */
+ // [TARGET getTable(TableId, TableOption...)]
+ // [VARIABLE "my_project_id"]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ public Table getTableFromId(String projectId, String datasetName, String tableName) {
+ // [START getTableFromId]
+ TableId tableId = TableId.of(projectId, datasetName, tableName);
+ Table table = bigquery.getTable(tableId);
+ // [END getTableFromId]
+ return table;
+ }
+
+ /**
+ * Example of creating a channel with which to write to a table.
+ */
+ // [TARGET writer(WriteChannelConfiguration)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ // [VARIABLE "StringValue1\nStringValue2\n"]
+  public BaseWriteChannel<BigQueryOptions, WriteChannelConfiguration> writeToTable(
+      String datasetName, String tableName, String csvData) throws IOException {
+    // [START writeToTable]
+    TableId tableId = TableId.of(datasetName, tableName);
+    WriteChannelConfiguration writeChannelConfiguration =
+        WriteChannelConfiguration.builder(tableId).formatOptions(FormatOptions.csv()).build();
+    BaseWriteChannel<BigQueryOptions, WriteChannelConfiguration> writer =
+        bigquery.writer(writeChannelConfiguration);
+    // Write data to writer
+    try {
+      writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+    } catch (IOException e) {
+      // Unable to write data
+    }
+    writer.close();
+    // [END writeToTable]
+    return writer;
+  }
+
+ /**
+ * Example of inserting rows into a table without running a load job.
+ */
+ // [TARGET insertAll(InsertAllRequest)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+  public InsertAllResponse insertAll(String datasetName, String tableName) {
+    // [START insertAll]
+    TableId tableId = TableId.of(datasetName, tableName);
+    // Values of the row to insert
+    Map<String, Object> rowContent = new HashMap<>();
+    rowContent.put("booleanField", true);
+    // Bytes are passed in base64
+    rowContent.put("bytesField", "DQ4KDQ==");
+    InsertAllResponse response = bigquery.insertAll(InsertAllRequest.builder(tableId)
+        .addRow("rowId", rowContent)
+        // More rows can be added in the same RPC by invoking .addRow() on the builder
+        .build());
+    if (response.hasErrors()) {
+      // If any of the insertions failed, this lets you inspect the errors
+      for (Entry<Long, List<BigQueryError>> entry : response.insertErrors().entrySet()) {
+        // inspect row error
+      }
+    }
+    // [END insertAll]
+    return response;
+  }
+
+ /**
+ * Example of creating a table.
+ */
+ // [TARGET create(TableInfo, TableOption...)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ // [VARIABLE "string_field"]
+ public Table createTable(String datasetName, String tableName, String fieldName) {
+ // [START createTable]
+ TableId tableId = TableId.of(datasetName, tableName);
+ // Table field definition
+ Field field = Field.of(fieldName, Field.Type.string());
+ // Table schema definition
+ Schema schema = Schema.of(field);
+ TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+ TableInfo tableInfo = TableInfo.builder(tableId, tableDefinition).build();
+ Table table = bigquery.create(tableInfo);
+ // [END createTable]
+ return table;
+ }
+
+ /**
+ * Example of listing table rows, specifying the page size.
+ */
+ // [TARGET listTableData(String, String, TableDataListOption...)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ public Page> listTableData(String datasetName, String tableName) {
+ // [START listTableData]
+ Page> tableData =
+ bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
+ Iterator> rowIterator = tableData.iterateAll();
+ while (rowIterator.hasNext()) {
+ List row = rowIterator.next();
+ // do something with the row
+ }
+ // [END listTableData]
+ return tableData;
+ }
+
+ /**
+ * Example of listing table rows, specifying the page size.
+ */
+ // [TARGET listTableData(TableId, TableDataListOption...)]
+ // [VARIABLE "my_dataset_name"]
+ // [VARIABLE "my_table_name"]
+ public Page> listTableDataFromId(String datasetName, String tableName) {
+ // [START listTableDataFromId]
+ TableId tableIdObject = TableId.of(datasetName, tableName);
+ Page> tableData =
+ bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+ Iterator> rowIterator = tableData.iterateAll();
+ while (rowIterator.hasNext()) {
+ List row = rowIterator.next();
+ // do something with the row
+ }
+ // [END listTableDataFromId]
+ return tableData;
+ }
+
+ /**
+ * Example of creating a query job.
+ */
+ // [TARGET create(JobInfo, JobOption...)]
+ // [VARIABLE "SELECT field FROM my_dataset_name.my_table_name"]
+ public Job createJob(String query) {
+ // [START createJob]
+ Job job = null;
+ JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
+ JobInfo jobInfo = JobInfo.of(jobConfiguration);
+ try {
+ job = bigquery.create(jobInfo);
+ } catch (BigQueryException e) {
+ // the job was not created
+ }
+ // [END createJob]
+ return job;
+ }
+
+ /**
+ * Example of listing jobs, specifying the page size.
+ */
+ // [TARGET listJobs(JobListOption...)]
+  public Page<Job> listJobs() {
+    // [START listJobs]
+    Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
+    Iterator<Job> jobIterator = jobs.iterateAll();
+    while (jobIterator.hasNext()) {
+      Job job = jobIterator.next();
+      // do something with the job
+    }
+    // [END listJobs]
+    return jobs;
+  }
+
+ /**
+ * Example of getting a job.
+ */
+ // [TARGET getJob(String, JobOption...)]
+ // [VARIABLE "my_job_name"]
+ public Job getJob(String jobName) {
+ // [START getJob]
+ Job job = bigquery.getJob(jobName);
+ if (job == null) {
+ // job was not found
+ }
+ // [END getJob]
+ return job;
+ }
+
+ /**
+ * Example of getting a job.
+ */
+ // [TARGET getJob(JobId, JobOption...)]
+ // [VARIABLE "my_job_name"]
+ public Job getJobFromId(String jobName) {
+ // [START getJobFromId]
+ JobId jobIdObject = JobId.of(jobName);
+ Job job = bigquery.getJob(jobIdObject);
+ if (job == null) {
+ // job was not found
+ }
+ // [END getJobFromId]
+ return job;
+ }
+
+
+ /**
+ * Example of cancelling a job.
+ */
+ // [TARGET cancel(String)]
+ // [VARIABLE "my_job_name"]
+ public boolean cancelJob(String jobName) {
+ // [START cancelJob]
+ boolean success = bigquery.cancel(jobName);
+ if (success) {
+ // job was cancelled
+ } else {
+ // job was not found
+ }
+ // [END cancelJob]
+ return success;
+ }
+
+ /**
+ * Example of cancelling a job.
+ */
+ // [TARGET cancel(JobId)]
+ // [VARIABLE "my_job_name"]
+ public boolean cancelJobFromId(String jobName) {
+ // [START cancelJobFromId]
+ JobId jobId = JobId.of(jobName);
+ boolean success = bigquery.cancel(jobId);
+ if (success) {
+ // job was cancelled
+ } else {
+ // job was not found
+ }
+ // [END cancelJobFromId]
+ return success;
+ }
+
+ /**
+ * Example of running a query.
+ */
+ // [TARGET query(QueryRequest)]
+ // [VARIABLE "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]"]
+  public QueryResponse runQuery(String query) throws InterruptedException {
+    // [START runQuery]
+    QueryRequest request = QueryRequest.of(query);
+    QueryResponse response = bigquery.query(request);
+    // Wait for things to finish
+    while (!response.jobCompleted()) {
+      Thread.sleep(1000);
+      response = bigquery.getQueryResults(response.jobId());
+    }
+    if (response.hasErrors()) {
+      // handle errors
+    }
+    QueryResult result = response.result();
+    Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+    while (rowIterator.hasNext()) {
+      List<FieldValue> row = rowIterator.next();
+      // do something with the data
+    }
+    // [END runQuery]
+    return response;
+  }
+
+ /**
+ * Example of getting the results of query.
+ */
+ // [TARGET getQueryResults(JobId, QueryResultsOption...)]
+ // [VARIABLE "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]"]
+  public QueryResponse queryResults(final String query) throws InterruptedException {
+    // [START queryResults]
+    QueryRequest request = QueryRequest.of(query);
+    QueryResponse response = bigquery.query(request);
+    // Wait for things to finish
+    while (!response.jobCompleted()) {
+      Thread.sleep(1000);
+      response = bigquery.getQueryResults(response.jobId());
+    }
+    if (response.hasErrors()) {
+      // handle errors
+    }
+    QueryResult result = response.result();
+    Iterator<List<FieldValue>> rowIterator = result.iterateAll();
+    while (rowIterator.hasNext()) {
+      List<FieldValue> row = rowIterator.next();
+      // do something with the data
+    }
+    // [END queryResults]
+    return response;
+  }
+}
diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java
new file mode 100644
index 000000000000..a60ab6a31beb
--- /dev/null
+++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.examples.bigquery.snippets;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import com.google.cloud.Page;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.Dataset;
+import com.google.cloud.bigquery.DatasetId;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Field.Type;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.InsertAllResponse;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobId;
+import com.google.cloud.bigquery.QueryResponse;
+import com.google.cloud.bigquery.QueryResult;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
+import com.google.common.base.Function;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Sets;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+
+public class ITBigQuerySnippets {
+
+ private static final String DATASET = RemoteBigQueryHelper.generateDatasetName();
+ private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName();
+ private static final String QUERY =
+ "SELECT unique(corpus) FROM [bigquery-public-data:samples.shakespeare]";
+ private static final Function TO_JOB_ID_FUNCTION = new Function() {
+ @Override
+ public JobId apply(Job job) {
+ return job.jobId();
+ }
+ };
+ private static final Function TO_TABLE_ID_FUNCTION =
+ new Function() {
+ @Override
+ public TableId apply(Table table) {
+ return table.tableId();
+ }
+ };
+ private static final Function TO_DATASET_ID_FUNCTION =
+ new Function() {
+ @Override
+ public DatasetId apply(Dataset dataset) {
+ return dataset.datasetId();
+ }
+ };
+
+ private static BigQuery bigquery;
+ private static BigQuerySnippets bigquerySnippets;
+
+ @Rule
+ public Timeout globalTimeout = Timeout.seconds(300);
+
+ @BeforeClass
+ public static void beforeClass() {
+ bigquery = RemoteBigQueryHelper.create().options().service();
+ bigquerySnippets = new BigQuerySnippets(bigquery);
+ bigquery.create(DatasetInfo.builder(DATASET).build());
+ }
+
+ @AfterClass
+ public static void afterClass() throws ExecutionException, InterruptedException {
+ bigquery.delete(DATASET, DatasetDeleteOption.deleteContents());
+ bigquery.delete(OTHER_DATASET, DatasetDeleteOption.deleteContents());
+ }
+
+ @Test
+ public void testCreateGetAndDeleteTable() throws InterruptedException {
+ String tableName = "test_create_get_delete";
+ String fieldName = "aField";
+ Table table = bigquerySnippets.createTable(DATASET, tableName, fieldName);
+ assertNotNull(table);
+ TableId tableId = TableId.of(bigquery.options().projectId(), DATASET, tableName);
+ assertEquals(tableId, bigquerySnippets.getTable(tableId.dataset(), tableId.table()).tableId());
+ assertNotNull(bigquerySnippets.updateTable(DATASET, tableName, "new friendly name"));
+ assertEquals("new friendly name",
+ bigquerySnippets.getTableFromId(tableId.project(), tableId.dataset(), tableId.table())
+ .friendlyName());
+ Set tables = Sets.newHashSet(
+ Iterators.transform(bigquerySnippets.listTables(DATASET).iterateAll(),
+ TO_TABLE_ID_FUNCTION));
+ while (!tables.contains(tableId)) {
+ Thread.sleep(500);
+ tables = Sets.newHashSet(
+ Iterators.transform(bigquerySnippets.listTables(DATASET).iterateAll(),
+ TO_TABLE_ID_FUNCTION));
+ }
+ tables = Sets.newHashSet(Iterators.transform(
+ bigquerySnippets.listTablesFromId(tableId.project(), DATASET).iterateAll(),
+ TO_TABLE_ID_FUNCTION));
+ while (!tables.contains(tableId)) {
+ Thread.sleep(500);
+ tables = Sets.newHashSet(Iterators.transform(
+ bigquerySnippets.listTablesFromId(tableId.project(), DATASET).iterateAll(),
+ TO_TABLE_ID_FUNCTION));
+ }
+ assertTrue(bigquerySnippets.deleteTable(DATASET, tableName));
+ assertFalse(bigquerySnippets.deleteTableFromId(tableId.project(), DATASET, tableName));
+ }
+
+ @Test
+ public void testCreateGetAndDeleteDataset() throws InterruptedException {
+ DatasetId datasetId = DatasetId.of(bigquery.options().projectId(), OTHER_DATASET);
+ Dataset dataset = bigquerySnippets.createDataset(OTHER_DATASET);
+ assertNotNull(dataset);
+ assertEquals(datasetId, bigquerySnippets.getDataset(OTHER_DATASET).datasetId());
+ assertNotNull(bigquerySnippets.updateDataset(OTHER_DATASET, "new friendly name"));
+ assertEquals("new friendly name",
+ bigquerySnippets.getDatasetFromId(datasetId.project(), OTHER_DATASET).friendlyName());
+ Set datasets = Sets.newHashSet(
+ Iterators.transform(bigquerySnippets.listDatasets().iterateAll(),
+ TO_DATASET_ID_FUNCTION));
+ while (!datasets.contains(datasetId)) {
+ Thread.sleep(500);
+ datasets = Sets.newHashSet(
+ Iterators.transform(bigquerySnippets.listDatasets().iterateAll(),
+ TO_DATASET_ID_FUNCTION));
+ }
+ datasets = Sets.newHashSet(
+ Iterators.transform(bigquerySnippets.listDatasets(datasetId.project()).iterateAll(),
+ TO_DATASET_ID_FUNCTION));
+ while (!datasets.contains(datasetId)) {
+ Thread.sleep(500);
+ datasets = Sets.newHashSet(
+ Iterators.transform(bigquerySnippets.listDatasets(datasetId.project()).iterateAll(),
+ TO_DATASET_ID_FUNCTION));
+ }
+ assertTrue(bigquerySnippets.deleteDataset(OTHER_DATASET));
+ assertFalse(bigquerySnippets.deleteDatasetFromId(datasetId.project(), OTHER_DATASET));
+ }
+
+ @Test
+ public void testWriteAndListTableData() throws IOException, InterruptedException {
+ String tableName = "test_write_and_list_table_data";
+ String fieldName = "string_field";
+ assertNotNull(bigquerySnippets.createTable(DATASET, tableName, fieldName));
+ bigquerySnippets.writeToTable(DATASET, tableName, "StringValue1\nStringValue2\n");
+ Page> listPage = bigquerySnippets.listTableData(DATASET, tableName);
+ while (Iterators.size(listPage.iterateAll()) < 2) {
+ Thread.sleep(500);
+ listPage = bigquerySnippets.listTableData(DATASET, tableName);
+ }
+ Iterator> rowIterator = listPage.values().iterator();
+ assertEquals("StringValue1", rowIterator.next().get(0).stringValue());
+ assertEquals("StringValue2", rowIterator.next().get(0).stringValue());
+ assertTrue(bigquerySnippets.deleteTable(DATASET, tableName));
+ }
+
+ @Test
+ public void testInsertAllAndListTableData() throws IOException, InterruptedException {
+ String tableName = "test_insert_all_and_list_table_data";
+ String fieldName1 = "booleanField";
+ String fieldName2 = "bytesField";
+ TableId tableId = TableId.of(DATASET, tableName);
+ Schema schema =
+ Schema.of(Field.of(fieldName1, Type.bool()), Field.of(fieldName2, Type.bytes()));
+ TableInfo table = TableInfo.of(tableId, StandardTableDefinition.of(schema));
+ assertNotNull(bigquery.create(table));
+ InsertAllResponse response = bigquerySnippets.insertAll(DATASET, tableName);
+ assertFalse(response.hasErrors());
+ assertTrue(response.insertErrors().isEmpty());
+ Page> listPage = bigquerySnippets.listTableDataFromId(DATASET, tableName);
+ while (Iterators.size(listPage.iterateAll()) < 1) {
+ Thread.sleep(500);
+ listPage = bigquerySnippets.listTableDataFromId(DATASET, tableName);
+ }
+ List row = listPage.values().iterator().next();
+ assertEquals(true, row.get(0).booleanValue());
+ assertArrayEquals(new byte[]{0xD, 0xE, 0xA, 0xD}, row.get(1).bytesValue());
+ assertTrue(bigquerySnippets.deleteTable(DATASET, tableName));
+ }
+
+ @Test
+ public void testJob() throws ExecutionException, InterruptedException {
+ Job job1 = bigquerySnippets.createJob(QUERY);
+ Job job2 = bigquerySnippets.createJob(QUERY);
+ assertNotNull(job1);
+ assertNotNull(job2);
+ assertEquals(job1.jobId(), bigquerySnippets.getJob(job1.jobId().job()).jobId());
+ assertEquals(job2.jobId(), bigquerySnippets.getJobFromId(job2.jobId().job()).jobId());
+ Set jobs = Sets.newHashSet(Iterators.transform(bigquerySnippets.listJobs().iterateAll(),
+ TO_JOB_ID_FUNCTION));
+ while (!jobs.contains(job1.jobId()) || !jobs.contains(job2.jobId())) {
+ Thread.sleep(500);
+ jobs = Sets.newHashSet(Iterators.transform(bigquerySnippets.listJobs().iterateAll(),
+ TO_JOB_ID_FUNCTION));
+ }
+ assertTrue(bigquerySnippets.cancelJob(job1.jobId().job()));
+ assertTrue(bigquerySnippets.cancelJobFromId(job2.jobId().job()));
+ }
+
+ @Test
+ public void testRunQuery() throws InterruptedException {
+ QueryResponse queryResponse = bigquerySnippets.runQuery(QUERY);
+ assertNotNull(queryResponse);
+ assertTrue(queryResponse.jobCompleted());
+ assertFalse(queryResponse.hasErrors());
+ QueryResult result = queryResponse.result();
+ assertNotNull(result);
+ assertTrue(bigquerySnippets.cancelJob(queryResponse.jobId().job()));
+ queryResponse = bigquerySnippets.queryResults(QUERY);
+ assertNotNull(queryResponse);
+ assertTrue(queryResponse.jobCompleted());
+ assertFalse(queryResponse.hasErrors());
+ result = queryResponse.result();
+ assertNotNull(result);
+ assertTrue(bigquerySnippets.cancelJobFromId(queryResponse.jobId().job()));
+ }
+}