From 8dd8786a5e9a38c7738889a2a6bca23848d3a3ee Mon Sep 17 00:00:00 2001 From: Marco Ziccardi Date: Fri, 4 Mar 2016 15:32:20 +0100 Subject: [PATCH] Remove redundant throws from services method signatures --- .../com/google/gcloud/bigquery/BigQuery.java | 53 ++++---- .../google/gcloud/bigquery/BigQueryImpl.java | 59 ++++----- .../com/google/gcloud/spi/BigQueryRpc.java | 87 +++++++++--- .../google/gcloud/spi/DefaultBigQueryRpc.java | 45 +++---- .../com/google/gcloud/spi/DatastoreRpc.java | 40 +++++- .../gcloud/spi/DefaultDatastoreRpc.java | 14 +- .../resourcemanager/ResourceManager.java | 2 +- .../gcloud/spi/DefaultResourceManagerRpc.java | 13 +- .../google/gcloud/spi/ResourceManagerRpc.java | 56 ++++++-- .../google/gcloud/spi/DefaultStorageRpc.java | 24 ++-- .../com/google/gcloud/spi/StorageRpc.java | 125 ++++++++++++++---- .../com/google/gcloud/storage/Storage.java | 44 +++--- 12 files changed, 364 insertions(+), 198 deletions(-) diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java index 4b5d3ef0c81a..986c595e350d 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java @@ -457,35 +457,35 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Dataset create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + Dataset create(DatasetInfo dataset, DatasetOption... options); /** * Creates a new table. * * @throws BigQueryException upon failure */ - Table create(TableInfo table, TableOption... options) throws BigQueryException; + Table create(TableInfo table, TableOption... options); /** * Creates a new job. * * @throws BigQueryException upon failure */ - Job create(JobInfo job, JobOption... options) throws BigQueryException; + Job create(JobInfo job, JobOption... options); /** * Returns the requested dataset or {@code null} if not found. * * @throws BigQueryException upon failure */ - Dataset getDataset(String datasetId, DatasetOption... options) throws BigQueryException; + Dataset getDataset(String datasetId, DatasetOption... options); /** * Returns the requested dataset or {@code null} if not found. * * @throws BigQueryException upon failure */ - Dataset getDataset(DatasetId datasetId, DatasetOption... options) throws BigQueryException; + Dataset getDataset(DatasetId datasetId, DatasetOption... options); /** * Lists the project's datasets. This method returns partial information on each dataset @@ -495,7 +495,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Page listDatasets(DatasetListOption... options) throws BigQueryException; + Page listDatasets(DatasetListOption... options); /** * Deletes the requested dataset. @@ -503,7 +503,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException; + boolean delete(String datasetId, DatasetDeleteOption... options); /** * Deletes the requested dataset. 
@@ -511,7 +511,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(DatasetId datasetId, DatasetDeleteOption... options) throws BigQueryException; + boolean delete(DatasetId datasetId, DatasetDeleteOption... options); /** * Deletes the requested table. @@ -519,7 +519,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(String datasetId, String tableId) throws BigQueryException; + boolean delete(String datasetId, String tableId); /** * Deletes the requested table. @@ -527,35 +527,35 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean delete(TableId tableId) throws BigQueryException; + boolean delete(TableId tableId); /** * Updates dataset information. * * @throws BigQueryException upon failure */ - Dataset update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException; + Dataset update(DatasetInfo dataset, DatasetOption... options); /** * Updates table information. * * @throws BigQueryException upon failure */ - Table update(TableInfo table, TableOption... options) throws BigQueryException; + Table update(TableInfo table, TableOption... options); /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - Table getTable(String datasetId, String tableId, TableOption... options) throws BigQueryException; + Table getTable(String datasetId, String tableId, TableOption... options); /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - Table getTable(TableId tableId, TableOption... options) throws BigQueryException; + Table getTable(TableId tableId, TableOption... options); /** * Lists the tables in the dataset. This method returns partial information on each table @@ -566,7 +566,7 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Page listTables(String datasetId, TableListOption... options) throws BigQueryException; + Page
<Table> listTables(String datasetId, TableListOption... options); /** * Lists the tables in the dataset. This method returns partial information on each table * @@ -577,14 +577,14 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * * @throws BigQueryException upon failure */ - Page<Table>
<Table> listTables(DatasetId datasetId, TableListOption... options) throws BigQueryException; + Page<Table>
listTables(DatasetId datasetId, TableListOption... options); /** * Sends an insert all request. * * @throws BigQueryException upon failure */ - InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException; + InsertAllResponse insertAll(InsertAllRequest request); /** * Lists the table's rows. @@ -592,36 +592,35 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) { * @throws BigQueryException upon failure */ Page> listTableData(String datasetId, String tableId, - TableDataListOption... options) throws BigQueryException; + TableDataListOption... options); /** * Lists the table's rows. * * @throws BigQueryException upon failure */ - Page> listTableData(TableId tableId, TableDataListOption... options) - throws BigQueryException; + Page> listTableData(TableId tableId, TableDataListOption... options); /** * Returns the requested job or {@code null} if not found. * * @throws BigQueryException upon failure */ - Job getJob(String jobId, JobOption... options) throws BigQueryException; + Job getJob(String jobId, JobOption... options); /** * Returns the requested job or {@code null} if not found. * * @throws BigQueryException upon failure */ - Job getJob(JobId jobId, JobOption... options) throws BigQueryException; + Job getJob(JobId jobId, JobOption... options); /** * Lists the jobs. * * @throws BigQueryException upon failure */ - Page listJobs(JobListOption... options) throws BigQueryException; + Page listJobs(JobListOption... options); /** * Sends a job cancel request. This call will return immediately. The job status can then be @@ -632,7 +631,7 @@ Page> listTableData(TableId tableId, TableDataListOption... opt * found * @throws BigQueryException upon failure */ - boolean cancel(String jobId) throws BigQueryException; + boolean cancel(String jobId); /** * Sends a job cancel request. This call will return immediately. The job status can then be @@ -643,21 +642,21 @@ Page> listTableData(TableId tableId, TableDataListOption... opt * found * @throws BigQueryException upon failure */ - boolean cancel(JobId tableId) throws BigQueryException; + boolean cancel(JobId tableId); /** * Runs the query associated with the request. * * @throws BigQueryException upon failure */ - QueryResponse query(QueryRequest request) throws BigQueryException; + QueryResponse query(QueryRequest request); /** * Returns results of the query associated with the provided job. * * @throws BigQueryException upon failure */ - QueryResponse getQueryResults(JobId job, QueryResultsOption... options) throws BigQueryException; + QueryResponse getQueryResults(JobId job, QueryResultsOption... options); /** * Returns a channel to write data to be inserted into a BigQuery table. Data format and other diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java index 1158dd86c83d..ce881c6ea079 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java @@ -153,7 +153,7 @@ public QueryResult nextPage() { } @Override - public Dataset create(DatasetInfo dataset, DatasetOption... options) throws BigQueryException { + public Dataset create(DatasetInfo dataset, DatasetOption... 
options) { final com.google.api.services.bigquery.model.Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -171,7 +171,7 @@ public com.google.api.services.bigquery.model.Dataset call() { } @Override - public Table create(TableInfo table, TableOption... options) throws BigQueryException { + public Table create(TableInfo table, TableOption... options) { final com.google.api.services.bigquery.model.Table tablePb = table.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -189,7 +189,7 @@ public com.google.api.services.bigquery.model.Table call() { } @Override - public Job create(JobInfo job, JobOption... options) throws BigQueryException { + public Job create(JobInfo job, JobOption... options) { final com.google.api.services.bigquery.model.Job jobPb = job.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -207,13 +207,12 @@ public com.google.api.services.bigquery.model.Job call() { } @Override - public Dataset getDataset(String datasetId, DatasetOption... options) throws BigQueryException { + public Dataset getDataset(String datasetId, DatasetOption... options) { return getDataset(DatasetId.of(datasetId), options); } @Override - public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) - throws BigQueryException { + public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.bigquery.model.Dataset answer = @@ -230,7 +229,7 @@ public com.google.api.services.bigquery.model.Dataset call() { } @Override - public Page listDatasets(DatasetListOption... options) throws BigQueryException { + public Page listDatasets(DatasetListOption... options) { return listDatasets(options(), optionMap(options)); } @@ -261,13 +260,12 @@ public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) { } @Override - public boolean delete(String datasetId, DatasetDeleteOption... options) throws BigQueryException { + public boolean delete(String datasetId, DatasetDeleteOption... options) { return delete(DatasetId.of(datasetId), options); } @Override - public boolean delete(final DatasetId datasetId, DatasetDeleteOption... options) - throws BigQueryException { + public boolean delete(final DatasetId datasetId, DatasetDeleteOption... options) { final Map optionsMap = optionMap(options); try { return runWithRetries(new Callable() { @@ -282,12 +280,12 @@ public Boolean call() { } @Override - public boolean delete(String datasetId, String tableId) throws BigQueryException { + public boolean delete(String datasetId, String tableId) { return delete(TableId.of(datasetId, tableId)); } @Override - public boolean delete(final TableId tableId) throws BigQueryException { + public boolean delete(final TableId tableId) { try { return runWithRetries(new Callable() { @Override @@ -301,7 +299,7 @@ public Boolean call() { } @Override - public Dataset update(DatasetInfo dataset, DatasetOption... options) throws BigQueryException { + public Dataset update(DatasetInfo dataset, DatasetOption... options) { final com.google.api.services.bigquery.model.Dataset datasetPb = dataset.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -319,7 +317,7 @@ public com.google.api.services.bigquery.model.Dataset call() { } @Override - public Table update(TableInfo table, TableOption... 
options) throws BigQueryException { + public Table update(TableInfo table, TableOption... options) { final com.google.api.services.bigquery.model.Table tablePb = table.setProjectId(options().projectId()).toPb(); final Map optionsMap = optionMap(options); @@ -337,13 +335,12 @@ public com.google.api.services.bigquery.model.Table call() { } @Override - public Table getTable(final String datasetId, final String tableId, TableOption... options) - throws BigQueryException { + public Table getTable(final String datasetId, final String tableId, TableOption... options) { return getTable(TableId.of(datasetId, tableId), options); } @Override - public Table getTable(final TableId tableId, TableOption... options) throws BigQueryException { + public Table getTable(final TableId tableId, TableOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.bigquery.model.Table answer = @@ -360,14 +357,12 @@ public com.google.api.services.bigquery.model.Table call() { } @Override - public Page
<Table> listTables(String datasetId, TableListOption... options) - throws BigQueryException { + public Page<Table>
<Table> listTables(String datasetId, TableListOption... options) { return listTables(datasetId, options(), optionMap(options)); } @Override - public Page<Table>
<Table> listTables(DatasetId datasetId, TableListOption... options) - throws BigQueryException { + public Page<Table>
listTables(DatasetId datasetId, TableListOption... options) { return listTables(datasetId.dataset(), options(), optionMap(options)); } @@ -399,7 +394,7 @@ public Table apply(com.google.api.services.bigquery.model.Table table) { } @Override - public InsertAllResponse insertAll(InsertAllRequest request) throws BigQueryException { + public InsertAllResponse insertAll(InsertAllRequest request) { final TableId tableId = request.table(); final TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest(); requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues()); @@ -418,13 +413,12 @@ public Rows apply(RowToInsert rowToInsert) { @Override public Page> listTableData(String datasetId, String tableId, - TableDataListOption... options) throws BigQueryException { + TableDataListOption... options) { return listTableData(TableId.of(datasetId, tableId), options(), optionMap(options)); } @Override - public Page> listTableData(TableId tableId, TableDataListOption... options) - throws BigQueryException { + public Page> listTableData(TableId tableId, TableDataListOption... options) { return listTableData(tableId, options(), optionMap(options)); } @@ -459,12 +453,12 @@ public List apply(TableRow rowPb) { } @Override - public Job getJob(String jobId, JobOption... options) throws BigQueryException { + public Job getJob(String jobId, JobOption... options) { return getJob(JobId.of(jobId), options); } @Override - public Job getJob(final JobId jobId, JobOption... options) throws BigQueryException { + public Job getJob(final JobId jobId, JobOption... options) { final Map optionsMap = optionMap(options); try { com.google.api.services.bigquery.model.Job answer = @@ -481,7 +475,7 @@ public com.google.api.services.bigquery.model.Job call() { } @Override - public Page listJobs(JobListOption... options) throws BigQueryException { + public Page listJobs(JobListOption... options) { return listJobs(options(), optionMap(options)); } @@ -508,12 +502,12 @@ public Job apply(com.google.api.services.bigquery.model.Job job) { } @Override - public boolean cancel(String jobId) throws BigQueryException { + public boolean cancel(String jobId) { return cancel(JobId.of(jobId)); } @Override - public boolean cancel(final JobId jobId) throws BigQueryException { + public boolean cancel(final JobId jobId) { try { return runWithRetries(new Callable() { @Override @@ -527,7 +521,7 @@ public Boolean call() { } @Override - public QueryResponse query(final QueryRequest request) throws BigQueryException { + public QueryResponse query(final QueryRequest request) { try { com.google.api.services.bigquery.model.QueryResponse results = runWithRetries(new Callable() { @@ -566,8 +560,7 @@ public com.google.api.services.bigquery.model.QueryResponse call() { } @Override - public QueryResponse getQueryResults(JobId job, QueryResultsOption... options) - throws BigQueryException { + public QueryResponse getQueryResults(JobId job, QueryResultsOption... 
options) { Map optionsMap = optionMap(options); return getQueryResults(job, options(), optionsMap); } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java index 6062e19950e0..a1935e5ab136 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/BigQueryRpc.java @@ -100,7 +100,7 @@ public Y y() { * * @throws BigQueryException upon failure */ - Dataset getDataset(String datasetId, Map options) throws BigQueryException; + Dataset getDataset(String datasetId, Map options); /** * Lists the project's datasets. Partial information is returned on a dataset (datasetReference, @@ -108,13 +108,28 @@ public Y y() { * * @throws BigQueryException upon failure */ - Tuple> listDatasets(Map options) throws BigQueryException; + Tuple> listDatasets(Map options); - Dataset create(Dataset dataset, Map options) throws BigQueryException; + /** + * Creates a new dataset. + * + * @throws BigQueryException upon failure + */ + Dataset create(Dataset dataset, Map options); - Table create(Table table, Map options) throws BigQueryException; + /** + * Creates a new table. + * + * @throws BigQueryException upon failure + */ + Table create(Table table, Map options); - Job create(Job job, Map options) throws BigQueryException; + /** + * Creates a new job. + * + * @throws BigQueryException upon failure + */ + Job create(Job job, Map options); /** * Delete the requested dataset. @@ -122,18 +137,28 @@ public Y y() { * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean deleteDataset(String datasetId, Map options) throws BigQueryException; + boolean deleteDataset(String datasetId, Map options); - Dataset patch(Dataset dataset, Map options) throws BigQueryException; + /** + * Updates dataset information. + * + * @throws BigQueryException upon failure + */ + Dataset patch(Dataset dataset, Map options); - Table patch(Table table, Map options) throws BigQueryException; + /** + * Updates table information. + * + * @throws BigQueryException upon failure + */ + Table patch(Table table, Map options); /** * Returns the requested table or {@code null} if not found. * * @throws BigQueryException upon failure */ - Table getTable(String datasetId, String tableId, Map options) throws BigQueryException; + Table getTable(String datasetId, String tableId, Map options); /** * Lists the dataset's tables. Partial information is returned on a table (tableReference, @@ -141,8 +166,7 @@ public Y y() { * * @throws BigQueryException upon failure */ - Tuple> listTables(String dataset, Map options) - throws BigQueryException; + Tuple> listTables(String dataset, Map options); /** * Delete the requested table. @@ -150,27 +174,37 @@ Tuple> listTables(String dataset, Map options * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ - boolean deleteTable(String datasetId, String tableId) throws BigQueryException; + boolean deleteTable(String datasetId, String tableId); + /** + * Sends an insert all request. + * + * @throws BigQueryException upon failure + */ TableDataInsertAllResponse insertAll(String datasetId, String tableId, - TableDataInsertAllRequest request) throws BigQueryException; + TableDataInsertAllRequest request); + /** + * Lists the table's rows. 
+ * + * @throws BigQueryException upon failure + */ Tuple> listTableData(String datasetId, String tableId, - Map options) throws BigQueryException; + Map options); /** * Returns the requested job or {@code null} if not found. * * @throws BigQueryException upon failure */ - Job getJob(String jobId, Map options) throws BigQueryException; + Job getJob(String jobId, Map options); /** * Lists the project's jobs. * * @throws BigQueryException upon failure */ - Tuple> listJobs(Map options) throws BigQueryException; + Tuple> listJobs(Map options); /** * Sends a job cancel request. This call will return immediately, and the client will need to poll @@ -180,12 +214,21 @@ Tuple> listTableData(String datasetId, String tableId * found * @throws BigQueryException upon failure */ - boolean cancel(String jobId) throws BigQueryException; + boolean cancel(String jobId); - GetQueryResultsResponse getQueryResults(String jobId, Map options) - throws BigQueryException; + /** + * Returns results of the query associated with the provided job. + * + * @throws BigQueryException upon failure + */ + GetQueryResultsResponse getQueryResults(String jobId, Map options); - QueryResponse query(QueryRequest request) throws BigQueryException; + /** + * Runs the query associated with the request. + * + * @throws BigQueryException upon failure + */ + QueryResponse query(QueryRequest request); /** * Opens a resumable upload session to load data into a BigQuery table and returns an upload URI. @@ -193,7 +236,7 @@ GetQueryResultsResponse getQueryResults(String jobId, Map options) * @param configuration load configuration * @throws BigQueryException upon failure */ - String open(JobConfiguration configuration) throws BigQueryException; + String open(JobConfiguration configuration); /** * Uploads the provided data to the resumable upload session at the specified position. 
@@ -207,5 +250,5 @@ GetQueryResultsResponse getQueryResults(String jobId, Map options) * @throws BigQueryException upon failure */ void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws BigQueryException; + boolean last); } diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java index b57f1dc8a128..a5c44129955a 100644 --- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java +++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/spi/DefaultBigQueryRpc.java @@ -90,7 +90,7 @@ private static BigQueryException translate(IOException exception) { } @Override - public Dataset getDataset(String datasetId, Map options) throws BigQueryException { + public Dataset getDataset(String datasetId, Map options) { try { return bigquery.datasets() .get(this.options.projectId(), datasetId) @@ -106,8 +106,7 @@ public Dataset getDataset(String datasetId, Map options) throws BigQu } @Override - public Tuple> listDatasets(Map options) - throws BigQueryException { + public Tuple> listDatasets(Map options) { try { DatasetList datasetsList = bigquery.datasets() .list(this.options.projectId()) @@ -135,7 +134,7 @@ public Dataset apply(DatasetList.Datasets datasetPb) { } @Override - public Dataset create(Dataset dataset, Map options) throws BigQueryException { + public Dataset create(Dataset dataset, Map options) { try { return bigquery.datasets().insert(this.options.projectId(), dataset) .setFields(FIELDS.getString(options)) @@ -146,8 +145,7 @@ public Dataset create(Dataset dataset, Map options) throws BigQueryEx } @Override - public Table create(Table table, Map options) - throws BigQueryException { + public Table create(Table table, Map options) { try { // unset the type, as it is output only table.setType(null); @@ -161,7 +159,7 @@ public Table create(Table table, Map options) } @Override - public Job create(Job job, Map options) throws BigQueryException { + public Job create(Job job, Map options) { try { return bigquery.jobs() .insert(this.options.projectId(), job) @@ -173,7 +171,7 @@ public Job create(Job job, Map options) throws BigQueryException { } @Override - public boolean deleteDataset(String datasetId, Map options) throws BigQueryException { + public boolean deleteDataset(String datasetId, Map options) { try { bigquery.datasets().delete(this.options.projectId(), datasetId) .setDeleteContents(DELETE_CONTENTS.getBoolean(options)) @@ -189,7 +187,7 @@ public boolean deleteDataset(String datasetId, Map options) throws Bi } @Override - public Dataset patch(Dataset dataset, Map options) throws BigQueryException { + public Dataset patch(Dataset dataset, Map options) { try { DatasetReference reference = dataset.getDatasetReference(); return bigquery.datasets() @@ -202,7 +200,7 @@ public Dataset patch(Dataset dataset, Map options) throws BigQueryExc } @Override - public Table patch(Table table, Map options) throws BigQueryException { + public Table patch(Table table, Map options) { try { // unset the type, as it is output only table.setType(null); @@ -217,8 +215,7 @@ public Table patch(Table table, Map options) throws BigQueryException } @Override - public Table getTable(String datasetId, String tableId, Map options) - throws BigQueryException { + public Table getTable(String datasetId, String tableId, Map options) { try { return bigquery.tables() .get(this.options.projectId(), datasetId, tableId) @@ 
-234,8 +231,7 @@ public Table getTable(String datasetId, String tableId, Map options) } @Override - public Tuple> listTables(String datasetId, Map options) - throws BigQueryException { + public Tuple> listTables(String datasetId, Map options) { try { TableList tableList = bigquery.tables() .list(this.options.projectId(), datasetId) @@ -262,7 +258,7 @@ public Table apply(TableList.Tables tablePb) { } @Override - public boolean deleteTable(String datasetId, String tableId) throws BigQueryException { + public boolean deleteTable(String datasetId, String tableId) { try { bigquery.tables().delete(this.options.projectId(), datasetId, tableId).execute(); return true; @@ -277,7 +273,7 @@ public boolean deleteTable(String datasetId, String tableId) throws BigQueryExce @Override public TableDataInsertAllResponse insertAll(String datasetId, String tableId, - TableDataInsertAllRequest request) throws BigQueryException { + TableDataInsertAllRequest request) { try { return bigquery.tabledata() .insertAll(this.options.projectId(), datasetId, tableId, request) @@ -289,7 +285,7 @@ public TableDataInsertAllResponse insertAll(String datasetId, String tableId, @Override public Tuple> listTableData(String datasetId, String tableId, - Map options) throws BigQueryException { + Map options) { try { TableDataList tableDataList = bigquery.tabledata() .list(this.options.projectId(), datasetId, tableId) @@ -306,7 +302,7 @@ public Tuple> listTableData(String datasetId, String } @Override - public Job getJob(String jobId, Map options) throws BigQueryException { + public Job getJob(String jobId, Map options) { try { return bigquery.jobs() .get(this.options.projectId(), jobId) @@ -322,7 +318,7 @@ public Job getJob(String jobId, Map options) throws BigQueryException } @Override - public Tuple> listJobs(Map options) throws BigQueryException { + public Tuple> listJobs(Map options) { try { JobList jobsList = bigquery.jobs() .list(this.options.projectId()) @@ -363,7 +359,7 @@ public Job apply(JobList.Jobs jobPb) { } @Override - public boolean cancel(String jobId) throws BigQueryException { + public boolean cancel(String jobId) { try { bigquery.jobs().cancel(this.options.projectId(), jobId).execute(); return true; @@ -377,8 +373,7 @@ public boolean cancel(String jobId) throws BigQueryException { } @Override - public GetQueryResultsResponse getQueryResults(String jobId, Map options) - throws BigQueryException { + public GetQueryResultsResponse getQueryResults(String jobId, Map options) { try { return bigquery.jobs().getQueryResults(this.options.projectId(), jobId) .setMaxResults(MAX_RESULTS.getLong(options)) @@ -397,7 +392,7 @@ public GetQueryResultsResponse getQueryResults(String jobId, Map opti } @Override - public QueryResponse query(QueryRequest request) throws BigQueryException { + public QueryResponse query(QueryRequest request) { try { return bigquery.jobs().query(this.options.projectId(), request).execute(); } catch (IOException ex) { @@ -406,7 +401,7 @@ public QueryResponse query(QueryRequest request) throws BigQueryException { } @Override - public String open(JobConfiguration configuration) throws BigQueryException { + public String open(JobConfiguration configuration) { try { Job loadJob = new Job().setConfiguration(configuration); StringBuilder builder = new StringBuilder() @@ -429,7 +424,7 @@ public String open(JobConfiguration configuration) throws BigQueryException { @Override public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws 
BigQueryException { + boolean last) { try { GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = bigquery.getRequestFactory().buildPutRequest(url, diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java index fd916e0a1c87..4d4540c70196 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DatastoreRpc.java @@ -35,16 +35,46 @@ */ public interface DatastoreRpc { - AllocateIdsResponse allocateIds(AllocateIdsRequest request) throws DatastoreException; + /** + * Sends an allocate IDs request. + * + * @throws DatastoreException upon failure + */ + AllocateIdsResponse allocateIds(AllocateIdsRequest request); + /** + * Sends a begin transaction request. + * + * @throws DatastoreException upon failure + */ BeginTransactionResponse beginTransaction(BeginTransactionRequest request) throws DatastoreException; - CommitResponse commit(CommitRequest request) throws DatastoreException; + /** + * Sends a commit request. + * + * @throws DatastoreException upon failure + */ + CommitResponse commit(CommitRequest request); - LookupResponse lookup(LookupRequest request) throws DatastoreException; + /** + * Sends a lookup request. + * + * @throws DatastoreException upon failure + */ + LookupResponse lookup(LookupRequest request); - RollbackResponse rollback(RollbackRequest request) throws DatastoreException; + /** + * Sends a rollback request. + * + * @throws DatastoreException upon failure + */ + RollbackResponse rollback(RollbackRequest request); - RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreException; + /** + * Sends a request to run a query. 
+ * + * @throws DatastoreException upon failure + */ + RunQueryResponse runQuery(RunQueryRequest request); } diff --git a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java index c82ff9689f68..f679cc0d5826 100644 --- a/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java +++ b/gcloud-java-datastore/src/main/java/com/google/gcloud/spi/DefaultDatastoreRpc.java @@ -111,8 +111,7 @@ private static DatastoreException translate( } @Override - public AllocateIdsResponse allocateIds(AllocateIdsRequest request) - throws DatastoreException { + public AllocateIdsResponse allocateIds(AllocateIdsRequest request) { try { return client.allocateIds(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -121,8 +120,7 @@ public AllocateIdsResponse allocateIds(AllocateIdsRequest request) } @Override - public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) - throws DatastoreException { + public BeginTransactionResponse beginTransaction(BeginTransactionRequest request) { try { return client.beginTransaction(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -131,7 +129,7 @@ public BeginTransactionResponse beginTransaction(BeginTransactionRequest request } @Override - public CommitResponse commit(CommitRequest request) throws DatastoreException { + public CommitResponse commit(CommitRequest request) { try { return client.commit(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -140,7 +138,7 @@ public CommitResponse commit(CommitRequest request) throws DatastoreException { } @Override - public LookupResponse lookup(LookupRequest request) throws DatastoreException { + public LookupResponse lookup(LookupRequest request) { try { return client.lookup(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -149,7 +147,7 @@ public LookupResponse lookup(LookupRequest request) throws DatastoreException { } @Override - public RollbackResponse rollback(RollbackRequest request) throws DatastoreException { + public RollbackResponse rollback(RollbackRequest request) { try { return client.rollback(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { @@ -158,7 +156,7 @@ public RollbackResponse rollback(RollbackRequest request) throws DatastoreExcept } @Override - public RunQueryResponse runQuery(RunQueryRequest request) throws DatastoreException { + public RunQueryResponse runQuery(RunQueryRequest request) { try { return client.runQuery(request); } catch (com.google.api.services.datastore.client.DatastoreException ex) { diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java index 3d27f2a33ac8..f9ddf6872414 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/resourcemanager/ResourceManager.java @@ -168,7 +168,7 @@ public static ProjectListOption fields(ProjectField... fields) { } /** - * Create a new project. + * Creates a new project. * *
<p>
Initially, the project resource is owned by its creator exclusively. The creator can later * grant permission to others to read or update the project. Several APIs are activated diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java index 61c622fa0c33..d30cd2df3627 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/DefaultResourceManagerRpc.java @@ -38,7 +38,7 @@ private static ResourceManagerException translate(IOException exception) { } @Override - public Project create(Project project) throws ResourceManagerException { + public Project create(Project project) { try { return resourceManager.projects().create(project).execute(); } catch (IOException ex) { @@ -47,7 +47,7 @@ public Project create(Project project) throws ResourceManagerException { } @Override - public void delete(String projectId) throws ResourceManagerException { + public void delete(String projectId) { try { resourceManager.projects().delete(projectId).execute(); } catch (IOException ex) { @@ -56,7 +56,7 @@ public void delete(String projectId) throws ResourceManagerException { } @Override - public Project get(String projectId, Map options) throws ResourceManagerException { + public Project get(String projectId, Map options) { try { return resourceManager.projects() .get(projectId) @@ -74,8 +74,7 @@ public Project get(String projectId, Map options) throws ResourceMana } @Override - public Tuple> list(Map options) - throws ResourceManagerException { + public Tuple> list(Map options) { try { ListProjectsResponse response = resourceManager.projects() .list() @@ -92,7 +91,7 @@ public Tuple> list(Map options) } @Override - public void undelete(String projectId) throws ResourceManagerException { + public void undelete(String projectId) { try { resourceManager.projects().undelete(projectId).execute(); } catch (IOException ex) { @@ -101,7 +100,7 @@ public void undelete(String projectId) throws ResourceManagerException { } @Override - public Project replace(Project project) throws ResourceManagerException { + public Project replace(Project project) { try { return resourceManager.projects().update(project.getProjectId(), project).execute(); } catch (IOException ex) { diff --git a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java index 52dfc2d2368e..e1d72a6a373e 100644 --- a/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java +++ b/gcloud-java-resourcemanager/src/main/java/com/google/gcloud/spi/ResourceManagerRpc.java @@ -75,17 +75,51 @@ public Y y() { } } - Project create(Project project) throws ResourceManagerException; - - void delete(String projectId) throws ResourceManagerException; - - Project get(String projectId, Map options) throws ResourceManagerException; - - Tuple> list(Map options) throws ResourceManagerException; - - void undelete(String projectId) throws ResourceManagerException; - - Project replace(Project project) throws ResourceManagerException; + /** + * Creates a new project. + * + * @throws ResourceManagerException upon failure + */ + Project create(Project project); + + /** + * Marks the project identified by the specified project ID for deletion. 
+ * + * @throws ResourceManagerException upon failure + */ + void delete(String projectId); + + /** + * Retrieves the project identified by the specified project ID. Returns {@code null} if the + * project is not found or if the user doesn't have read permissions for the project. + * + * @throws ResourceManagerException upon failure + */ + Project get(String projectId, Map options); + + /** + * Lists the projects visible to the current user. + * + * @throws ResourceManagerException upon failure + */ + Tuple> list(Map options); + + /** + * Restores the project identified by the specified project ID. Undelete will only succeed if the + * project has a lifecycle state of {@code DELETE_REQUESTED} state. The caller must have modify + * permissions for this project. + * + * @throws ResourceManagerException upon failure + */ + void undelete(String projectId); + + /** + * Replaces the attributes of the project. The caller must have modify permissions for this + * project. + * + * @throws ResourceManagerException upon failure + */ + Project replace(Project project); // TODO(ajaykannan): implement "Organization" functionality when available (issue #319) } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java index dc84a1de5559..3d7febfd556b 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/DefaultStorageRpc.java @@ -99,7 +99,7 @@ private static StorageException translate(GoogleJsonError exception) { } @Override - public Bucket create(Bucket bucket, Map options) throws StorageException { + public Bucket create(Bucket bucket, Map options) { try { return storage.buckets() .insert(this.options.projectId(), bucket) @@ -114,7 +114,7 @@ public Bucket create(Bucket bucket, Map options) throws StorageExcept @Override public StorageObject create(StorageObject storageObject, final InputStream content, - Map options) throws StorageException { + Map options) { try { Storage.Objects.Insert insert = storage.objects() .insert(storageObject.getBucket(), storageObject, @@ -296,7 +296,7 @@ private Storage.Objects.Delete deleteRequest(StorageObject blob, Map @Override public StorageObject compose(Iterable sources, StorageObject target, - Map targetOptions) throws StorageException { + Map targetOptions) { ComposeRequest request = new ComposeRequest(); if (target.getContentType() == null) { target.setContentType("application/octet-stream"); @@ -327,8 +327,7 @@ public StorageObject compose(Iterable sources, StorageObject targ } @Override - public byte[] load(StorageObject from, Map options) - throws StorageException { + public byte[] load(StorageObject from, Map options) { try { Storage.Objects.Get getRequest = storage.objects() .get(from.getBucket(), from.getName()) @@ -347,7 +346,7 @@ public byte[] load(StorageObject from, Map options) } @Override - public BatchResponse batch(BatchRequest request) throws StorageException { + public BatchResponse batch(BatchRequest request) { List>>> partitionedToDelete = Lists.partition(request.toDelete, MAX_BATCH_DELETES); Iterator>>> iterator = partitionedToDelete.iterator(); @@ -437,7 +436,7 @@ public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) { @Override public Tuple read(StorageObject from, Map options, long position, - int bytes) throws StorageException { + int bytes) { try { Get req = storage.objects() .get(from.getBucket(), from.getName()) 
@@ -460,7 +459,7 @@ public Tuple read(StorageObject from, Map options, lo @Override public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws StorageException { + boolean last) { try { GenericUrl url = new GenericUrl(uploadId); HttpRequest httpRequest = storage.getRequestFactory().buildPutRequest(url, @@ -501,8 +500,7 @@ public void write(String uploadId, byte[] toWrite, int toWriteOffset, long destO } @Override - public String open(StorageObject object, Map options) - throws StorageException { + public String open(StorageObject object, Map options) { try { Insert req = storage.objects().insert(object.getBucket(), object); GenericUrl url = req.buildHttpRequest().getUrl(); @@ -538,16 +536,16 @@ public String open(StorageObject object, Map options) } @Override - public RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException { + public RewriteResponse openRewrite(RewriteRequest rewriteRequest) { return rewrite(rewriteRequest, null); } @Override - public RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException { + public RewriteResponse continueRewrite(RewriteResponse previousResponse) { return rewrite(previousResponse.rewriteRequest, previousResponse.rewriteToken); } - private RewriteResponse rewrite(RewriteRequest req, String token) throws StorageException { + private RewriteResponse rewrite(RewriteRequest req, String token) { try { Long maxBytesRewrittenPerCall = req.megabytesRewrittenPerCall != null ? req.megabytesRewrittenPerCall * MEGABYTE : null; diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java index e15a27114810..c76fc0f2d3b8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/spi/StorageRpc.java @@ -217,57 +217,134 @@ public int hashCode() { } } - Bucket create(Bucket bucket, Map options) throws StorageException; + /** + * Creates a new bucket. + * + * @throws StorageException upon failure + */ + Bucket create(Bucket bucket, Map options); - StorageObject create(StorageObject object, InputStream content, Map options) - throws StorageException; + /** + * Creates a new storage object. + * + * @throws StorageException upon failure + */ + StorageObject create(StorageObject object, InputStream content, Map options); - Tuple> list(Map options) throws StorageException; + /** + * Lists the project's buckets. + * + * @throws StorageException upon failure + */ + Tuple> list(Map options); - Tuple> list(String bucket, Map options) - throws StorageException; + /** + * Lists the bucket's blobs. + * + * @throws StorageException upon failure + */ + Tuple> list(String bucket, Map options); /** * Returns the requested bucket or {@code null} if not found. * * @throws StorageException upon failure */ - Bucket get(Bucket bucket, Map options) throws StorageException; + Bucket get(Bucket bucket, Map options); /** * Returns the requested storage object or {@code null} if not found. * * @throws StorageException upon failure */ - StorageObject get(StorageObject object, Map options) - throws StorageException; + StorageObject get(StorageObject object, Map options); - Bucket patch(Bucket bucket, Map options) throws StorageException; + /** + * Updates bucket information. 
+ * + * @throws StorageException upon failure + */ + Bucket patch(Bucket bucket, Map options); - StorageObject patch(StorageObject storageObject, Map options) - throws StorageException; + /** + * Updates the storage object's information. Original metadata are merged with metadata in the + * provided {@code storageObject}. + * + * @throws StorageException upon failure + */ + StorageObject patch(StorageObject storageObject, Map options); - boolean delete(Bucket bucket, Map options) throws StorageException; + /** + * Deletes the requested bucket. + * + * @return {@code true} if the bucket was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + boolean delete(Bucket bucket, Map options); - boolean delete(StorageObject object, Map options) throws StorageException; + /** + * Deletes the requested storage object. + * + * @return {@code true} if the storage object was deleted, {@code false} if it was not found + * @throws StorageException upon failure + */ + boolean delete(StorageObject object, Map options); - BatchResponse batch(BatchRequest request) throws StorageException; + /** + * Sends a batch request. + * + * @throws StorageException upon failure + */ + BatchResponse batch(BatchRequest request); + /** + * Sends a compose request. + * + * @throws StorageException upon failure + */ StorageObject compose(Iterable sources, StorageObject target, - Map targetOptions) throws StorageException; + Map targetOptions); - byte[] load(StorageObject storageObject, Map options) - throws StorageException; + /** + * Reads all the bytes from a storage object. + * + * @throws StorageException upon failure + */ + byte[] load(StorageObject storageObject, Map options); - Tuple read(StorageObject from, Map options, long position, int bytes) - throws StorageException; + /** + * Reads the given amount of bytes from a storage object at the given position. + * + * @throws StorageException upon failure + */ + Tuple read(StorageObject from, Map options, long position, int bytes); - String open(StorageObject object, Map options) throws StorageException; + /** + * Opens a resumable upload channel for a given storage object. + * + * @throws StorageException upon failure + */ + String open(StorageObject object, Map options); + /** + * Writes the provided bytes to a storage object at the provided location. + * + * @throws StorageException upon failure + */ void write(String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, - boolean last) throws StorageException; + boolean last); - RewriteResponse openRewrite(RewriteRequest rewriteRequest) throws StorageException; + /** + * Sends a rewrite request to open a rewrite channel. + * + * @throws StorageException upon failure + */ + RewriteResponse openRewrite(RewriteRequest rewriteRequest); - RewriteResponse continueRewrite(RewriteResponse previousResponse) throws StorageException; + /** + * Continues rewriting on an already open rewrite channel. 
+ * + * @throws StorageException + */ + RewriteResponse continueRewrite(RewriteResponse previousResponse); } diff --git a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java index 98f3450b7f10..065bcca7c9e8 100644 --- a/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java +++ b/gcloud-java-storage/src/main/java/com/google/gcloud/storage/Storage.java @@ -1215,7 +1215,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx } /** - * Create a new bucket. + * Creates a new bucket. * * @return a complete bucket * @throws StorageException upon failure @@ -1223,7 +1223,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Bucket create(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Create a new blob with no content. + * Creates a new blob with no content. * * @return a [@code Blob} with complete information * @throws StorageException upon failure @@ -1231,7 +1231,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob create(BlobInfo blobInfo, BlobTargetOption... options); /** - * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * Creates a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. MD5 and CRC32C hashes of * {@code content} are computed and used for validating transferred data. * @@ -1242,7 +1242,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options); /** - * Create a new blob. Direct upload is used to upload {@code content}. For large content, + * Creates a new blob. Direct upload is used to upload {@code content}. For large content, * {@link #writer} is recommended as it uses resumable upload. By default any md5 and crc32c * values in the given {@code blobInfo} are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. The given @@ -1254,49 +1254,49 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options); /** - * Return the requested bucket or {@code null} if not found. + * Returns the requested bucket or {@code null} if not found. * * @throws StorageException upon failure */ Bucket get(String bucket, BucketGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ Blob get(String bucket, String blob, BlobGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ Blob get(BlobId blob, BlobGetOption... options); /** - * Return the requested blob or {@code null} if not found. + * Returns the requested blob or {@code null} if not found. * * @throws StorageException upon failure */ Blob get(BlobId blob); /** - * List the project's buckets. + * Lists the project's buckets. * * @throws StorageException upon failure */ Page list(BucketListOption... options); /** - * List the bucket's blobs. + * Lists the bucket's blobs. * * @throws StorageException upon failure */ Page list(String bucket, BlobListOption... 
options); /** - * Update bucket information. + * Updates bucket information. * * @return the updated bucket * @throws StorageException upon failure @@ -1304,7 +1304,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Bucket update(BucketInfo bucketInfo, BucketTargetOption... options); /** - * Update blob information. Original metadata are merged with metadata in the provided + * Updates blob information. Original metadata are merged with metadata in the provided * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. * @@ -1321,7 +1321,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob update(BlobInfo blobInfo, BlobTargetOption... options); /** - * Update blob information. Original metadata are merged with metadata in the provided + * Updates blob information. Original metadata are merged with metadata in the provided * {@code blobInfo}. To replace metadata instead you first have to unset them. Unsetting metadata * can be done by setting the provided {@code blobInfo}'s metadata to {@code null}. * @@ -1338,7 +1338,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx Blob update(BlobInfo blobInfo); /** - * Delete the requested bucket. + * Deletes the requested bucket. * * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1346,7 +1346,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(String bucket, BucketSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1354,7 +1354,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(String bucket, String blob, BlobSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1362,7 +1362,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(BlobId blob, BlobSourceOption... options); /** - * Delete the requested blob. + * Deletes the requested blob. * * @return {@code true} if blob was deleted, {@code false} if it was not found * @throws StorageException upon failure @@ -1370,7 +1370,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx boolean delete(BlobId blob); /** - * Send a compose request. + * Sends a compose request. * * @return the composed blob * @throws StorageException upon failure @@ -1422,7 +1422,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx byte[] readAllBytes(BlobId blob, BlobSourceOption... options); /** - * Send a batch request. + * Sends a batch request. * * @return the batch response * @throws StorageException upon failure @@ -1430,7 +1430,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx BatchResponse submit(BatchRequest batchRequest); /** - * Return a channel for reading the blob's content. The blob's latest generation is read. If the + * Returns a channel for reading the blob's content. The blob's latest generation is read. If the * blob changes while reading (i.e. 
{@link BlobInfo#etag()} changes), subsequent calls to * {@code blobReadChannel.read(ByteBuffer)} may throw {@link StorageException}. * @@ -1443,7 +1443,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx ReadChannel reader(String bucket, String blob, BlobSourceOption... options); /** - * Return a channel for reading the blob's content. If {@code blob.generation()} is set + * Returns a channel for reading the blob's content. If {@code blob.generation()} is set * data corresponding to that generation is read. If {@code blob.generation()} is {@code null} * the blob's latest generation is read. If the blob changes while reading (i.e. * {@link BlobInfo#etag()} changes), subsequent calls to {@code blobReadChannel.read(ByteBuffer)} @@ -1459,7 +1459,7 @@ private static void checkContentType(BlobInfo blobInfo) throws IllegalArgumentEx ReadChannel reader(BlobId blob, BlobSourceOption... options); /** - * Create a blob and return a channel for writing its content. By default any md5 and crc32c + * Creates a blob and return a channel for writing its content. By default any md5 and crc32c * values in the given {@code blobInfo} are ignored unless requested via the * {@code BlobWriteOption.md5Match} and {@code BlobWriteOption.crc32cMatch} options. *
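For context on why the removed clauses were redundant: every exception declared here (BigQueryException, DatastoreException, ResourceManagerException, StorageException) is unchecked, so the `throws` declarations imposed no compile-time obligation on callers and only cluttered the signatures. A minimal caller-side sketch, using a hypothetical ServiceException in place of the real gcloud-java classes:

```java
// Minimal sketch with hypothetical names: a service exception that extends
// RuntimeException does not have to be declared or caught by callers, which is
// why the "throws" clauses removed in this patch were redundant.
public class ThrowsDemo {

  // Hypothetical stand-in for BigQueryException, StorageException, etc.
  static class ServiceException extends RuntimeException {
    ServiceException(String message) {
      super(message);
    }
  }

  interface Service {
    // No "throws ServiceException" needed: the exception is unchecked, so the
    // declaration adds no compile-time obligation for callers.
    String getResource(String id);
  }

  public static void main(String[] args) {
    Service service = id -> {
      if (id.isEmpty()) {
        throw new ServiceException("resource id must not be empty");
      }
      return "resource-" + id;
    };

    // Compiles without try/catch...
    System.out.println(service.getResource("42"));
    // ...but callers can still catch the exception explicitly when they want to.
    try {
      service.getResource("");
    } catch (ServiceException ex) {
      System.out.println("handled: " + ex.getMessage());
    }
  }
}
```

Existing callers keep compiling unchanged, since unchecked exceptions never had to be handled or redeclared in the first place.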
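The Default*Rpc implementations touched above already follow a catch-and-translate pattern: the checked exception thrown by the underlying API client is caught and rethrown as the unchecked service exception, which is what makes the signature-level `throws` unnecessary. A rough sketch of that pattern, with hypothetical names rather than the actual gcloud-java types:

```java
import java.io.IOException;

// Rough sketch with hypothetical names of the catch-and-translate pattern used
// by the Default*Rpc classes: checked exceptions from the transport layer are
// wrapped in an unchecked service exception, so the RPC method itself needs no
// throws clause.
public class TranslateDemo {

  static class ServiceException extends RuntimeException {
    ServiceException(IOException cause) {
      super(cause.getMessage(), cause);
    }
  }

  // Stand-in for a generated API client call that declares a checked IOException.
  static String executeHttpCall(String resource) throws IOException {
    if (resource.isEmpty()) {
      throw new IOException("404 Not Found");
    }
    return "payload for " + resource;
  }

  // Stand-in for a Default*Rpc method: the signature carries no throws clause.
  static String getResource(String resource) {
    try {
      return executeHttpCall(resource);
    } catch (IOException ex) {
      throw new ServiceException(ex); // translate checked -> unchecked
    }
  }

  public static void main(String[] args) {
    System.out.println(getResource("datasets/my_dataset"));
    try {
      getResource("");
    } catch (ServiceException ex) {
      System.out.println("translated: " + ex.getMessage());
    }
  }
}
```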