diff --git a/gcloud-java-bigquery/README.md b/gcloud-java-bigquery/README.md
index cd2c5727f249..077e4fbc332a 100644
--- a/gcloud-java-bigquery/README.md
+++ b/gcloud-java-bigquery/README.md
@@ -118,7 +118,7 @@ Field stringField = Field.of("StringField", Field.Type.string());
 // Table schema definition
 Schema schema = Schema.of(stringField);
 // Create a table
-BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema));
+TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema));
 ```

 #### Loading data into a table
@@ -232,7 +232,7 @@ public class GcloudBigQueryExample {
     // Table schema definition
     Schema schema = Schema.of(stringField);
     // Create a table
-    BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema));
+    TableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, schema));

     // Define rows to insert
     Map<String, Object> firstRow = new HashMap<>();
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java
index da8f23e9a0ba..70c225942829 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQuery.java
@@ -463,14 +463,14 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
    *
    * @throws BigQueryException upon failure
    */
-  BaseTableInfo create(BaseTableInfo table, TableOption... options) throws BigQueryException;
+  <T extends BaseTableInfo> T create(T table, TableOption... options) throws BigQueryException;

   /**
    * Creates a new job.
    *
    * @throws BigQueryException upon failure
    */
-  JobInfo create(JobInfo job, JobOption... options) throws BigQueryException;
+  <T extends JobInfo> T create(T job, JobOption... options) throws BigQueryException;

   /**
    * Returns the requested dataset or {@code null} if not found.
    *
    * @throws BigQueryException upon failure
    */
@@ -541,14 +541,14 @@ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
    *
    * @throws BigQueryException upon failure
    */
-  BaseTableInfo update(BaseTableInfo table, TableOption... options) throws BigQueryException;
+  <T extends BaseTableInfo> T update(T table, TableOption... options) throws BigQueryException;

   /**
    * Returns the requested table or {@code null} if not found.
    *
    * @throws BigQueryException upon failure
    */
-  BaseTableInfo getTable(String datasetId, String tableId, TableOption... options)
+  <T extends BaseTableInfo> T getTable(String datasetId, String tableId, TableOption... options)
       throws BigQueryException;

   /**
@@ -556,7 +556,8 @@ BaseTableInfo getTable(String datasetId, String tableId, TableOption... options)
    *
    * @throws BigQueryException upon failure
    */
-  BaseTableInfo getTable(TableId tableId, TableOption... options) throws BigQueryException;
+  <T extends BaseTableInfo> T getTable(TableId tableId, TableOption... options)
+      throws BigQueryException;

   /**
    * Lists the tables in the dataset. This method returns partial information on each table
@@ -610,14 +611,14 @@ Page<List<FieldValue>> listTableData(TableId tableId, TableDataListOption... opt
    *
    * @throws BigQueryException upon failure
    */
-  JobInfo getJob(String jobId, JobOption... options) throws BigQueryException;
+  <T extends JobInfo> T getJob(String jobId, JobOption... options) throws BigQueryException;

   /**
    * Returns the requested job or {@code null} if not found.
    *
    * @throws BigQueryException upon failure
    */
-  JobInfo getJob(JobId jobId, JobOption... options) throws BigQueryException;
+  <T extends JobInfo> T getJob(JobId jobId, JobOption... options) throws BigQueryException;

   /**
    * Lists the jobs.
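With `create`, `update`, `getTable`, and `getJob` now generic in the concrete `*Info` type, callers bind the result to the subtype they ask for instead of downcasting from `BaseTableInfo`/`JobInfo`. A minimal caller-side sketch against the new signatures (it assumes the `BigQueryOptions.defaultInstance()` entry point used elsewhere in the README; the dataset and table names are placeholders, not part of this change):

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.Field;
import com.google.gcloud.bigquery.Schema;
import com.google.gcloud.bigquery.TableId;
import com.google.gcloud.bigquery.TableInfo;

public class GenericCreateSketch {
  public static void main(String... args) {
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    Schema schema = Schema.of(Field.of("StringField", Field.Type.string()));
    TableId tableId = TableId.of("my_dataset", "my_table");
    // <T extends BaseTableInfo> T create(T table, ...) infers T = TableInfo from the
    // argument, so no cast from BaseTableInfo is needed on the result.
    TableInfo created = bigquery.create(TableInfo.of(tableId, schema));
    // For getTable the binding comes from the assignment target; asking for the wrong
    // subtype (e.g. a view bound to TableInfo) only fails at run time.
    TableInfo fetched = bigquery.getTable(tableId);
    System.out.println(created.tableId() + " / " + fetched.schema());
  }
}
```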
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java
index e2f8f890c6a3..62685d8ecc46 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/BigQueryImpl.java
@@ -199,7 +199,7 @@ public Dataset call() {
   }

   @Override
-  public BaseTableInfo create(BaseTableInfo table, TableOption... options)
+  public <T extends BaseTableInfo> T create(T table, TableOption... options)
       throws BigQueryException {
     final Table tablePb = setProjectId(table).toPb();
     final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
@@ -216,7 +216,7 @@ public Table call() {
   }

   @Override
-  public JobInfo create(JobInfo job, JobOption... options) throws BigQueryException {
+  public <T extends JobInfo> T create(T job, JobOption... options) throws BigQueryException {
     final Job jobPb = setProjectId(job).toPb();
     final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
     try {
@@ -335,7 +335,7 @@ public Dataset call() {
   }

   @Override
-  public BaseTableInfo update(BaseTableInfo table, TableOption... options)
+  public <T extends BaseTableInfo> T update(T table, TableOption... options)
       throws BigQueryException {
     final Table tablePb = setProjectId(table).toPb();
     final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
@@ -352,13 +352,13 @@ public Table call() {
   }

   @Override
-  public BaseTableInfo getTable(final String datasetId, final String tableId,
+  public <T extends BaseTableInfo> T getTable(final String datasetId, final String tableId,
       TableOption... options) throws BigQueryException {
     return getTable(TableId.of(datasetId, tableId), options);
   }

   @Override
-  public BaseTableInfo getTable(final TableId tableId, TableOption... options)
+  public <T extends BaseTableInfo> T getTable(final TableId tableId, TableOption... options)
       throws BigQueryException {
     final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
     try {
@@ -368,7 +368,7 @@ public Table call() {
           return bigQueryRpc.getTable(tableId.dataset(), tableId.table(), optionsMap);
         }
       }, options().retryParams(), EXCEPTION_HANDLER);
-      return answer == null ? null : BaseTableInfo.fromPb(answer);
+      return answer == null ? null : BaseTableInfo.<T>fromPb(answer);
     } catch (RetryHelper.RetryHelperException e) {
       throw BigQueryException.translateAndThrow(e);
     }
@@ -466,12 +466,13 @@ public List<FieldValue> apply(TableRow rowPb) {
   }

   @Override
-  public JobInfo getJob(String jobId, JobOption... options) throws BigQueryException {
+  public <T extends JobInfo> T getJob(String jobId, JobOption... options) throws BigQueryException {
     return getJob(JobId.of(jobId), options);
   }

   @Override
-  public JobInfo getJob(final JobId jobId, JobOption... options) throws BigQueryException {
+  public <T extends JobInfo> T getJob(final JobId jobId, JobOption... options)
+      throws BigQueryException {
     final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
     try {
       Job answer = runWithRetries(new Callable<Job>() {
@@ -480,7 +481,7 @@ public Job call() {
         return bigQueryRpc.getJob(jobId.job(), optionsMap);
       }
     }, options().retryParams(), EXCEPTION_HANDLER);
-      return answer == null ? null : JobInfo.fromPb(answer);
+      return answer == null ? null : JobInfo.<T>fromPb(answer);
     } catch (RetryHelper.RetryHelperException e) {
       throw BigQueryException.translateAndThrow(e);
     }
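The implementation leans on `BaseTableInfo.<T>fromPb(answer)` / `JobInfo.<T>fromPb(answer)`, whose bodies are not part of this diff. The self-contained sketch below (class names are invented for illustration, not gcloud-java types) shows the pattern such a generic `fromPb` implies: build the most specific runtime object, then perform an unchecked cast to the caller-chosen `T`, deferring any mismatch to run time.

```java
// Illustration only: the real BaseTableInfo/JobInfo.fromPb live elsewhere in the library.
abstract class Info {}

class StandardInfo extends Info {}

class ViewLikeInfo extends Info {}

final class FromPbSketch {
  @SuppressWarnings("unchecked")
  static <T extends Info> T fromPb(boolean isView) {
    // Pick the most specific runtime type based on the payload...
    Info info = isView ? new ViewLikeInfo() : new StandardInfo();
    // ...then cast to whatever T the caller requested (unchecked).
    return (T) info;
  }

  public static void main(String[] args) {
    StandardInfo ok = FromPbSketch.<StandardInfo>fromPb(false); // fine
    System.out.println(ok.getClass().getSimpleName());
    try {
      // Binding T to the wrong subtype compiles but fails at the call site at run time.
      ViewLikeInfo bad = FromPbSketch.<ViewLikeInfo>fromPb(false);
      System.out.println(bad);
    } catch (ClassCastException e) {
      System.out.println("wrong T requested: " + e);
    }
  }
}
```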
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
index ecd9b23c1eb1..bcc946f65006 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
@@ -103,41 +103,43 @@ public class BigQueryImplTest {
       .description("FieldDescription3")
       .build();
   private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3);
-  private static final BaseTableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_SCHEMA);
-  private static final BaseTableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_SCHEMA);
-  private static final BaseTableInfo TABLE_INFO_WITH_PROJECT =
+  private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_SCHEMA);
+  private static final TableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_SCHEMA);
+  private static final TableInfo TABLE_INFO_WITH_PROJECT =
       TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_SCHEMA);
-  private static final JobInfo LOAD_JOB = LoadJobInfo.of(TABLE_ID, "URI");
-  private static final JobInfo LOAD_JOB_WITH_PROJECT = LoadJobInfo.of(TABLE_ID_WITH_PROJECT, "URI");
-  private static final JobInfo COMPLETE_LOAD_JOB = LoadJobInfo.builder(TABLE_ID_WITH_PROJECT, "URI")
-      .jobId(JobId.of(PROJECT, JOB))
-      .build();
-  private static final JobInfo COPY_JOB =
+  private static final LoadJobInfo LOAD_JOB = LoadJobInfo.of(TABLE_ID, "URI");
+  private static final LoadJobInfo LOAD_JOB_WITH_PROJECT =
+      LoadJobInfo.of(TABLE_ID_WITH_PROJECT, "URI");
+  private static final LoadJobInfo COMPLETE_LOAD_JOB =
+      LoadJobInfo.builder(TABLE_ID_WITH_PROJECT, "URI")
+          .jobId(JobId.of(PROJECT, JOB))
+          .build();
+  private static final CopyJobInfo COPY_JOB =
       CopyJobInfo.of(TABLE_ID, ImmutableList.of(TABLE_ID, TABLE_ID));
-  private static final JobInfo COPY_JOB_WITH_PROJECT =
+  private static final CopyJobInfo COPY_JOB_WITH_PROJECT =
       CopyJobInfo.of(TABLE_ID_WITH_PROJECT, ImmutableList.of(TABLE_ID_WITH_PROJECT,
          TABLE_ID_WITH_PROJECT));
-  private static final JobInfo COMPLETE_COPY_JOB =
+  private static final CopyJobInfo COMPLETE_COPY_JOB =
       CopyJobInfo.builder(TABLE_ID_WITH_PROJECT, ImmutableList.of(TABLE_ID_WITH_PROJECT,
          TABLE_ID_WITH_PROJECT))
          .jobId(JobId.of(PROJECT, JOB))
          .build();
-  private static final JobInfo QUERY_JOB = QueryJobInfo.builder("SQL")
+  private static final QueryJobInfo QUERY_JOB = QueryJobInfo.builder("SQL")
       .defaultDataset(DatasetId.of(DATASET))
       .destinationTable(TABLE_ID)
       .build();
-  private static final JobInfo QUERY_JOB_WITH_PROJECT = QueryJobInfo.builder("SQL")
+  private static final QueryJobInfo QUERY_JOB_WITH_PROJECT = QueryJobInfo.builder("SQL")
       .defaultDataset(DatasetId.of(PROJECT, DATASET))
       .destinationTable(TABLE_ID_WITH_PROJECT)
       .build();
-  private static final JobInfo COMPLETE_QUERY_JOB = QueryJobInfo.builder("SQL")
+  private static final QueryJobInfo COMPLETE_QUERY_JOB = QueryJobInfo.builder("SQL")
       .defaultDataset(DatasetId.of(PROJECT, DATASET)).destinationTable(TABLE_ID_WITH_PROJECT)
       .jobId(JobId.of(PROJECT, JOB))
       .build();
-  private static final JobInfo EXTRACT_JOB = ExtractJobInfo.of(TABLE_ID, "URI");
-  private static final JobInfo EXTRACT_JOB_WITH_PROJECT =
+  private static final ExtractJobInfo EXTRACT_JOB = ExtractJobInfo.of(TABLE_ID, "URI");
+  private static final ExtractJobInfo EXTRACT_JOB_WITH_PROJECT =
       ExtractJobInfo.of(TABLE_ID_WITH_PROJECT, "URI");
-  private static final JobInfo COMPLETE_EXTRACT_JOB =
+  private static final ExtractJobInfo COMPLETE_EXTRACT_JOB =
       ExtractJobInfo.builder(TABLE_ID_WITH_PROJECT, "URI")
          .jobId(JobId.of(PROJECT, JOB))
          .build();
@@ -447,7 +449,7 @@ public void testCreateTable() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.create(TABLE_INFO);
+    TableInfo table = bigquery.create(TABLE_INFO);
     assertEquals(TABLE_INFO_WITH_PROJECT, table);
   }

@@ -459,7 +461,7 @@ public void testCreateTableWithSelectedFields() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS);
+    TableInfo table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("tableReference"));
     assertTrue(selector.contains("schema"));
@@ -474,7 +476,7 @@ public void testGetTable() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.getTable(DATASET, TABLE);
+    TableInfo table = bigquery.getTable(DATASET, TABLE);
     assertEquals(TABLE_INFO_WITH_PROJECT, table);
   }

@@ -484,7 +486,7 @@ public void testGetTableFromTableId() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.getTable(TABLE_ID);
+    TableInfo table = bigquery.getTable(TABLE_ID);
     assertEquals(TABLE_INFO_WITH_PROJECT, table);
   }

@@ -495,7 +497,7 @@ public void testGetTableWithSelectedFields() {
         .andReturn(TABLE_INFO_WITH_PROJECT.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS);
+    TableInfo table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("tableReference"));
     assertTrue(selector.contains("schema"));
@@ -507,8 +509,8 @@ public void testGetTableWithSelectedFields() {
   @Test
   public void testListTables() {
     String cursor = "cursor";
-    ImmutableList<BaseTableInfo> tableList = ImmutableList.of(TABLE_INFO_WITH_PROJECT,
-        OTHER_TABLE_INFO);
+    ImmutableList<TableInfo> tableList =
+        ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO);
     Tuple<String, Iterable<Table>> result = Tuple.of(cursor,
         Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION));
     EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result);
@@ -522,8 +524,8 @@ public void testListTables() {
   @Test
   public void testListTablesFromDatasetId() {
     String cursor = "cursor";
-    ImmutableList<BaseTableInfo> tableList = ImmutableList.of(TABLE_INFO_WITH_PROJECT,
-        OTHER_TABLE_INFO);
+    ImmutableList<TableInfo> tableList =
+        ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO);
     Tuple<String, Iterable<Table>> result = Tuple.of(cursor,
         Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION));
     EasyMock.expect(bigqueryRpcMock.listTables(DATASET, EMPTY_RPC_OPTIONS)).andReturn(result);
@@ -537,8 +539,8 @@ public void testListTablesFromDatasetId() {
   @Test
   public void testListTablesWithOptions() {
     String cursor = "cursor";
-    ImmutableList<BaseTableInfo> tableList = ImmutableList.of(TABLE_INFO_WITH_PROJECT,
-        OTHER_TABLE_INFO);
+    ImmutableList<TableInfo> tableList =
+        ImmutableList.of(TABLE_INFO_WITH_PROJECT, OTHER_TABLE_INFO);
     Tuple<String, Iterable<Table>> result = Tuple.of(cursor,
         Iterables.transform(tableList, BaseTableInfo.TO_PB_FUNCTION));
     EasyMock.expect(bigqueryRpcMock.listTables(DATASET, TABLE_LIST_OPTIONS)).andReturn(result);
@@ -568,30 +570,30 @@ public void testDeleteTableFromTableId() {

   @Test
   public void testUpdateTable() {
-    BaseTableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build();
-    BaseTableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder()
+    TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build();
+    TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder()
         .description("newDescription")
         .build();
     EasyMock.expect(bigqueryRpcMock.patch(updatedTableInfoWithProject.toPb(), EMPTY_RPC_OPTIONS))
         .andReturn(updatedTableInfoWithProject.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.update(updatedTableInfo);
+    TableInfo table = bigquery.update(updatedTableInfo);
     assertEquals(updatedTableInfoWithProject, table);
   }

   @Test
   public void testUpdateTableWithSelectedFields() {
     Capture<Map<BigQueryRpc.Option, Object>> capturedOptions = Capture.newInstance();
-    BaseTableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build();
-    BaseTableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder()
+    TableInfo updatedTableInfo = TABLE_INFO.toBuilder().description("newDescription").build();
+    TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder()
         .description("newDescription")
         .build();
     EasyMock.expect(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()),
         capture(capturedOptions))).andReturn(updatedTableInfoWithProject.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    BaseTableInfo table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS);
+    TableInfo table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS);
     String selector = (String) capturedOptions.getValue().get(TABLE_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("tableReference"));
     assertTrue(selector.contains("schema"));
@@ -727,7 +729,7 @@ public void testCreateQueryJob() {
         .andReturn(COMPLETE_QUERY_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.create(QUERY_JOB);
+    QueryJobInfo job = bigquery.create(QUERY_JOB);
     assertEquals(COMPLETE_QUERY_JOB, job);
   }

@@ -737,7 +739,7 @@ public void testCreateLoadJob() {
         .andReturn(COMPLETE_LOAD_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.create(LOAD_JOB);
+    LoadJobInfo job = bigquery.create(LOAD_JOB);
     assertEquals(COMPLETE_LOAD_JOB, job);
   }

@@ -747,7 +749,7 @@ public void testCreateCopyJob() {
         .andReturn(COMPLETE_COPY_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.create(COPY_JOB);
+    CopyJobInfo job = bigquery.create(COPY_JOB);
     assertEquals(COMPLETE_COPY_JOB, job);
   }

@@ -757,7 +759,7 @@ public void testCreateExtractJob() {
         .andReturn(COMPLETE_EXTRACT_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.create(EXTRACT_JOB);
+    ExtractJobInfo job = bigquery.create(EXTRACT_JOB);
     assertEquals(COMPLETE_EXTRACT_JOB, job);
   }

@@ -769,7 +771,7 @@ public void testCreateJobWithSelectedFields() {
         .andReturn(COMPLETE_QUERY_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS);
+    QueryJobInfo job = bigquery.create(QUERY_JOB, JOB_OPTION_FIELDS);
     assertEquals(COMPLETE_QUERY_JOB, job);
     String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption());
     assertTrue(selector.contains("jobReference"));
@@ -784,7 +786,7 @@ public void testGetJob() {
         .andReturn(COMPLETE_COPY_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.getJob(JOB);
+    CopyJobInfo job = bigquery.getJob(JOB);
     assertEquals(COMPLETE_COPY_JOB, job);
   }

@@ -794,15 +796,15 @@ public void testGetJobFromJobId() {
         .andReturn(COMPLETE_COPY_JOB.toPb());
     EasyMock.replay(bigqueryRpcMock);
     bigquery = options.service();
-    JobInfo job = bigquery.getJob(JobId.of(PROJECT, JOB));
+    CopyJobInfo job = bigquery.getJob(JobId.of(PROJECT, JOB));
     assertEquals(COMPLETE_COPY_JOB, job);
   }

   @Test
   public void testListJobs() {
     String cursor = "cursor";
-    ImmutableList<JobInfo> jobList = ImmutableList.of(QUERY_JOB_WITH_PROJECT,
-        LOAD_JOB_WITH_PROJECT);
+    ImmutableList<JobInfo> jobList =
+        ImmutableList.<JobInfo>of(QUERY_JOB_WITH_PROJECT, LOAD_JOB_WITH_PROJECT);
     Tuple<String, Iterable<Job>> result = Tuple.of(cursor,
         Iterables.transform(jobList, new Function<JobInfo, Job>() {
           @Override
@@ -821,8 +823,8 @@ public Job apply(JobInfo jobInfo) {
   @Test
   public void testListJobsWithOptions() {
     String cursor = "cursor";
-    ImmutableList<JobInfo> jobList = ImmutableList.of(QUERY_JOB_WITH_PROJECT,
-        LOAD_JOB_WITH_PROJECT);
+    ImmutableList<JobInfo> jobList =
+        ImmutableList.<JobInfo>of(QUERY_JOB_WITH_PROJECT, LOAD_JOB_WITH_PROJECT);
     Tuple<String, Iterable<Job>> result = Tuple.of(cursor,
         Iterables.transform(jobList, new Function<JobInfo, Job>() {
           @Override
@@ -843,8 +845,8 @@ public Job apply(JobInfo jobInfo) {
   public void testListJobsWithSelectedFields() {
     String cursor = "cursor";
     Capture<Map<BigQueryRpc.Option, Object>> capturedOptions = Capture.newInstance();
-    ImmutableList<JobInfo> jobList = ImmutableList.of(QUERY_JOB_WITH_PROJECT,
-        LOAD_JOB_WITH_PROJECT);
+    ImmutableList<JobInfo> jobList =
+        ImmutableList.<JobInfo>of(QUERY_JOB_WITH_PROJECT, LOAD_JOB_WITH_PROJECT);
     Tuple<String, Iterable<Job>> result = Tuple.of(cursor,
         Iterables.transform(jobList, new Function<JobInfo, Job>() {
           @Override
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
index caff9dd510d1..4a4f01de4124 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
@@ -146,7 +146,7 @@ public static void beforeClass() throws IOException, InterruptedException {
         JSON_CONTENT.getBytes(StandardCharsets.UTF_8));
     DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build();
     bigquery.create(info);
-    JobInfo job = LoadJobInfo.builder(TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE)
+    LoadJobInfo job = LoadJobInfo.builder(TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE)
         .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
         .schema(TABLE_SCHEMA)
         .formatOptions(FormatOptions.json())
         .build();
@@ -612,9 +612,9 @@ public void testCreateAndGetJob() throws InterruptedException {
     assertEquals(DATASET, createdTableInfo.tableId().dataset());
     assertEquals(sourceTableName, createdTableInfo.tableId().table());
     TableId destinationTable = TableId.of(DATASET, destinationTableName);
-    JobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
-    CopyJobInfo createdJob = (CopyJobInfo) bigquery.create(job);
-    CopyJobInfo remoteJob = (CopyJobInfo) bigquery.getJob(createdJob.jobId());
+    CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
+    CopyJobInfo createdJob = bigquery.create(job);
+    CopyJobInfo remoteJob = bigquery.getJob(createdJob.jobId());
     assertEquals(createdJob.jobId(), remoteJob.jobId());
     assertEquals(createdJob.sourceTables(), remoteJob.sourceTables());
     assertEquals(createdJob.destinationTable(), remoteJob.destinationTable());
@@ -640,9 +640,8 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException
     assertEquals(DATASET, createdTableInfo.tableId().dataset());
     assertEquals(sourceTableName, createdTableInfo.tableId().table());
     TableId destinationTable = TableId.of(DATASET, destinationTableName);
-    JobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
-    CopyJobInfo createdJob = (CopyJobInfo) bigquery.create(job,
-        JobOption.fields(BigQuery.JobField.ETAG));
+    CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
+    CopyJobInfo createdJob = bigquery.create(job, JobOption.fields(BigQuery.JobField.ETAG));
     assertNotNull(createdJob.jobId());
     assertNotNull(createdJob.sourceTables());
     assertNotNull(createdJob.destinationTable());
@@ -651,7 +650,7 @@ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException
     assertNull(createdJob.status());
     assertNull(createdJob.selfLink());
     assertNull(createdJob.userEmail());
-    CopyJobInfo remoteJob = (CopyJobInfo) bigquery.getJob(createdJob.jobId(),
+    CopyJobInfo remoteJob = bigquery.getJob(createdJob.jobId(),
         JobOption.fields(BigQuery.JobField.ETAG));
     assertEquals(createdJob.jobId(), remoteJob.jobId());
     assertEquals(createdJob.sourceTables(), remoteJob.sourceTables());
@@ -678,8 +677,8 @@ public void testCopyJob() throws InterruptedException {
     assertEquals(DATASET, createdTableInfo.tableId().dataset());
     assertEquals(sourceTableName, createdTableInfo.tableId().table());
     TableId destinationTable = TableId.of(DATASET, destinationTableName);
-    JobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
-    JobInfo remoteJob = bigquery.create(job);
+    CopyJobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
+    CopyJobInfo remoteJob = bigquery.create(job);
     while (remoteJob.status().state() != JobStatus.State.DONE) {
       Thread.sleep(1000);
       remoteJob = bigquery.getJob(remoteJob.jobId());
     }
@@ -706,7 +705,7 @@ public void testQueryJob() throws InterruptedException {
         .defaultDataset(DatasetId.of(DATASET))
         .destinationTable(destinationTable)
         .build();
-    JobInfo remoteJob = bigquery.create(job);
+    QueryJobInfo remoteJob = bigquery.create(job);
     while (remoteJob.status().state() != JobStatus.State.DONE) {
       Thread.sleep(1000);
       remoteJob = bigquery.getJob(remoteJob.jobId());
     }
@@ -738,29 +737,29 @@ public void testQueryJob() throws InterruptedException {
   }

   @Test
-  public void testExtract() throws InterruptedException {
+  public void testExtractJob() throws InterruptedException {
     String tableName = "test_export_job_table";
     TableId destinationTable = TableId.of(DATASET, tableName);
-    JobInfo remoteJob = bigquery.create(
+    LoadJobInfo remoteLoadJob = bigquery.create(
         LoadJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE)
             .schema(SIMPLE_SCHEMA)
             .build());
-    while (remoteJob.status().state() != JobStatus.State.DONE) {
+    while (remoteLoadJob.status().state() != JobStatus.State.DONE) {
       Thread.sleep(1000);
-      remoteJob = bigquery.getJob(remoteJob.jobId());
+      remoteLoadJob = bigquery.getJob(remoteLoadJob.jobId());
     }
-    assertNull(remoteJob.status().error());
+    assertNull(remoteLoadJob.status().error());
     ExtractJobInfo extractJob =
         ExtractJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE)
             .printHeader(false)
             .build();
-    remoteJob = bigquery.create(extractJob);
-    while (remoteJob.status().state() != JobStatus.State.DONE) {
+    ExtractJobInfo remoteExtractJob = bigquery.create(extractJob);
+    while (remoteExtractJob.status().state() != JobStatus.State.DONE) {
       Thread.sleep(1000);
-      remoteJob = bigquery.getJob(remoteJob.jobId());
+      remoteExtractJob = bigquery.getJob(remoteExtractJob.jobId());
     }
-    assertNull(remoteJob.status().error());
+    assertNull(remoteExtractJob.status().error());
     assertEquals(CSV_CONTENT, new String(storage.readAllBytes(BUCKET, EXTRACT_FILE),
         StandardCharsets.UTF_8));
     assertTrue(bigquery.delete(DATASET, tableName));
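The integration-test changes show the practical effect of the generic signatures: the copy-and-poll flow no longer needs `(CopyJobInfo)` casts. A stand-alone sketch of that flow against the new API (it assumes the `BigQueryOptions.defaultInstance()` entry point; the dataset and table names are placeholders and the referenced tables must already exist in your project):

```java
import com.google.gcloud.bigquery.BigQuery;
import com.google.gcloud.bigquery.BigQueryOptions;
import com.google.gcloud.bigquery.CopyJobInfo;
import com.google.gcloud.bigquery.JobStatus;
import com.google.gcloud.bigquery.TableId;

public class CopyJobSketch {
  public static void main(String... args) throws InterruptedException {
    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    // Placeholder identifiers; any existing dataset/table pair would do.
    TableId source = TableId.of("my_dataset", "source_table");
    TableId destination = TableId.of("my_dataset", "destination_table");
    // create and getJob both bind T to CopyJobInfo directly, so no downcast is needed.
    CopyJobInfo remoteJob = bigquery.create(CopyJobInfo.of(destination, source));
    while (remoteJob.status().state() != JobStatus.State.DONE) {
      Thread.sleep(1000);
      remoteJob = bigquery.getJob(remoteJob.jobId());
    }
    System.out.println("copy finished, error = " + remoteJob.status().error());
  }
}
```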