diff --git a/.github/readme/synth.metadata/synth.metadata b/.github/readme/synth.metadata/synth.metadata
index 8ed19f97e4..0204d77b54 100644
--- a/.github/readme/synth.metadata/synth.metadata
+++ b/.github/readme/synth.metadata/synth.metadata
@@ -4,14 +4,14 @@
"git": {
"name": ".",
"remote": "https://github.com/googleapis/java-bigquerystorage.git",
- "sha": "a6b53566cfd174fb36a903cbd84948defc0403e6"
+ "sha": "1554247cf55aa56281a530c721ab1650699a3efc"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "79c8dd7ee768292f933012d3a69a5b4676404cda"
+ "sha": "78437c732a60c64895778697b078497b0988346c"
}
}
]
diff --git a/README.md b/README.md
index 570237b59c..0a37147361 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file
<groupId>com.google.cloud</groupId>
<artifactId>libraries-bom</artifactId>
- <version>19.2.1</version>
+ <version>19.1.0</version>
<type>pom</type>
<scope>import</scope>
@@ -38,25 +38,25 @@ If you are using Maven without BOM, add this to your dependencies:
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-bigquerystorage</artifactId>
- <version>1.15.1</version>
+ <version>1.15.0</version>
```
If you are using Gradle 5.x or later, add this to your dependencies
```Groovy
-implementation platform('com.google.cloud:libraries-bom:19.2.1')
+implementation platform('com.google.cloud:libraries-bom:19.1.0')
compile 'com.google.cloud:google-cloud-bigquerystorage'
```
If you are using Gradle without BOM, add this to your dependencies
```Groovy
-compile 'com.google.cloud:google-cloud-bigquerystorage:1.15.1'
+compile 'com.google.cloud:google-cloud-bigquerystorage:1.15.0'
```
If you are using SBT, add this to your dependencies
```Scala
-libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "1.15.1"
+libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "1.15.0"
```
## Authentication
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptor.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptor.java
index 4ab936619c..e4aa97798f 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptor.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptor.java
@@ -47,14 +47,14 @@ public class BQTableSchemaToProtoDescriptor {
.put(TableFieldSchema.Type.BOOL, FieldDescriptorProto.Type.TYPE_BOOL)
.put(TableFieldSchema.Type.BYTES, FieldDescriptorProto.Type.TYPE_BYTES)
.put(TableFieldSchema.Type.DATE, FieldDescriptorProto.Type.TYPE_INT32)
- .put(TableFieldSchema.Type.DATETIME, FieldDescriptorProto.Type.TYPE_STRING)
+ .put(TableFieldSchema.Type.DATETIME, FieldDescriptorProto.Type.TYPE_INT64)
.put(TableFieldSchema.Type.DOUBLE, FieldDescriptorProto.Type.TYPE_DOUBLE)
.put(TableFieldSchema.Type.GEOGRAPHY, FieldDescriptorProto.Type.TYPE_STRING)
.put(TableFieldSchema.Type.INT64, FieldDescriptorProto.Type.TYPE_INT64)
.put(TableFieldSchema.Type.NUMERIC, FieldDescriptorProto.Type.TYPE_STRING)
.put(TableFieldSchema.Type.STRING, FieldDescriptorProto.Type.TYPE_STRING)
.put(TableFieldSchema.Type.STRUCT, FieldDescriptorProto.Type.TYPE_MESSAGE)
- .put(TableFieldSchema.Type.TIME, FieldDescriptorProto.Type.TYPE_STRING)
+ .put(TableFieldSchema.Type.TIME, FieldDescriptorProto.Type.TYPE_INT64)
.put(TableFieldSchema.Type.TIMESTAMP, FieldDescriptorProto.Type.TYPE_INT64)
.build();
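
With DATETIME and TIME now mapped to TYPE_INT64, callers are expected to supply these fields as packed 64-bit civil-time values rather than formatted strings. A minimal sketch of producing such values, using only the CivilTimeEncoder methods exercised in the tests below (encodePacked64TimeMicros, encodePacked64DatetimeMicros); the column names here are illustrative, not taken from any schema in this change:

```java
import com.google.cloud.bigquery.storage.v1beta2.CivilTimeEncoder;
import org.json.JSONObject;
import org.threeten.bp.LocalDateTime;
import org.threeten.bp.LocalTime;

public class PackedCivilTimeExample {
  public static void main(String[] args) {
    // TIME and DATETIME columns now travel as int64, not strings.
    long packedTime = CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1));
    long packedDatetime =
        CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.of(2020, 10, 1, 12, 0));

    // Hypothetical column names; a JSON row handed to a JsonStreamWriter would carry the longs.
    JSONObject row = new JSONObject();
    row.put("test_time", packedTime);
    row.put("test_datetime", packedDatetime);
    System.out.println(row);
  }
}
```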
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BQTableSchemaToProtoDescriptorTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BQTableSchemaToProtoDescriptorTest.java
index 85bba343ac..63c51cf56c 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BQTableSchemaToProtoDescriptorTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BQTableSchemaToProtoDescriptorTest.java
@@ -201,7 +201,7 @@ public void testStructComplex() throws Exception {
.build();
final Table.TableFieldSchema TEST_TIME =
Table.TableFieldSchema.newBuilder()
- .setType(Table.TableFieldSchema.Type.TIME)
+ .setType(Table.TableFieldSchema.Type.INT64)
.setMode(Table.TableFieldSchema.Mode.NULLABLE)
.setName("test_time")
.build();
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java
index 4cb658f638..e5d0676975 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java
@@ -38,13 +38,13 @@ public class BQTableSchemaToProtoDescriptorTest {
.put(TableFieldSchema.Type.BOOL, BoolType.getDescriptor())
.put(TableFieldSchema.Type.BYTES, BytesType.getDescriptor())
.put(TableFieldSchema.Type.DATE, Int32Type.getDescriptor())
- .put(TableFieldSchema.Type.DATETIME, StringType.getDescriptor())
+ .put(TableFieldSchema.Type.DATETIME, Int64Type.getDescriptor())
.put(TableFieldSchema.Type.DOUBLE, DoubleType.getDescriptor())
.put(TableFieldSchema.Type.GEOGRAPHY, StringType.getDescriptor())
.put(TableFieldSchema.Type.INT64, Int64Type.getDescriptor())
.put(TableFieldSchema.Type.NUMERIC, StringType.getDescriptor())
.put(TableFieldSchema.Type.STRING, StringType.getDescriptor())
- .put(TableFieldSchema.Type.TIME, StringType.getDescriptor())
+ .put(TableFieldSchema.Type.TIME, Int64Type.getDescriptor())
.put(TableFieldSchema.Type.TIMESTAMP, Int64Type.getDescriptor())
.build();
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/CivilTimeEncoderTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/CivilTimeEncoderTest.java
index 5711a05617..673d3b0eab 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/CivilTimeEncoderTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/CivilTimeEncoderTest.java
@@ -151,7 +151,7 @@ public void decodePacked64TimeMicros_invalidHourOfDay_throwsIllegalArgumentExcep
}
}
- // Date Time
+ // Date Time Tests
@Test
public void encodeAndDecodePacked64DatetimeMicros_validDateTime() {
// 0001/01/01 00:00:00
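
For orientation, the encoder round-trips a civil time through a single long: decodePacked64TimeMicros appears in the test name above, and a datetime counterpart is implied by encodeAndDecodePacked64DatetimeMicros_validDateTime. A minimal round-trip sketch at microsecond precision; the values and printed checks are illustrative, not taken from these tests:

```java
import com.google.cloud.bigquery.storage.v1beta2.CivilTimeEncoder;
import org.threeten.bp.LocalDateTime;
import org.threeten.bp.LocalTime;

public class CivilTimeRoundTrip {
  public static void main(String[] args) {
    LocalTime time = LocalTime.of(13, 14, 15, 16_000_000); // 13:14:15.016
    long packedTime = CivilTimeEncoder.encodePacked64TimeMicros(time);
    // Decoding restores the original value (precision finer than micros would be lost).
    LocalTime decodedTime = CivilTimeEncoder.decodePacked64TimeMicros(packedTime);
    System.out.println(time.equals(decodedTime)); // expected: true

    LocalDateTime dt = LocalDateTime.of(1995, 5, 19, 10, 30, 45, 0);
    long packedDatetime = CivilTimeEncoder.encodePacked64DatetimeMicros(dt);
    LocalDateTime decodedDt = CivilTimeEncoder.decodePacked64DatetimeMicros(packedDatetime);
    System.out.println(dt.equals(decodedDt)); // expected: true
  }
}
```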
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java
index 0f68ae9c79..1205aa9615 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java
@@ -48,6 +48,7 @@
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.threeten.bp.Instant;
+import org.threeten.bp.LocalTime;
@RunWith(JUnit4.class)
public class JsonStreamWriterTest {
@@ -410,7 +411,7 @@ public void testSingleAppendComplexJson() throws Exception {
.setTestNumeric("1.23456")
.setTestGeo("POINT(1,1)")
.setTestTimestamp(12345678)
- .setTestTime("01:00:01")
+ .setTestTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
.build();
JSONObject complex_lvl2 = new JSONObject();
complex_lvl2.put("test_int", 3);
@@ -431,7 +432,7 @@ public void testSingleAppendComplexJson() throws Exception {
json.put("test_numeric", "1.23456");
json.put("test_geo", "POINT(1,1)");
json.put("test_timestamp", 12345678);
- json.put("test_time", "01:00:01");
+ json.put("test_time", CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)));
JSONArray jsonArr = new JSONArray();
jsonArr.put(json);
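
The long written into test_time above is a bit-packed civil time, not a count of microseconds since midnight. The sketch below unpacks it under the assumption of a ZetaSQL-style layout (hour, minute, and second in bit fields above 20 bits of microseconds); if that assumption is off, decodePacked64TimeMicros remains the authoritative way to read the value back:

```java
import com.google.cloud.bigquery.storage.v1beta2.CivilTimeEncoder;
import org.threeten.bp.LocalTime;

public class PackedTimeLayoutSketch {
  public static void main(String[] args) {
    long packed = CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1));

    // Assumed layout: | hour (5 bits) | minute (6 bits) | second (6 bits) | micros (20 bits) |
    long micros = packed & 0xFFFFF;
    long second = (packed >> 20) & 0x3F;
    long minute = (packed >> 26) & 0x3F;
    long hour = (packed >> 32) & 0x1F;

    // Prints 1:0:1.0 if the assumed layout matches the encoder's packing.
    System.out.println(hour + ":" + minute + ":" + second + "." + micros);
  }
}
```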
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
new file mode 100644
index 0000000000..28647b2ccf
--- /dev/null
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2021 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1beta2.it;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.api.core.ApiFuture;
+import com.google.cloud.ServiceOptions;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field.Mode;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.TableResult;
+import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
+import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
+import com.google.cloud.bigquery.storage.v1beta2.CivilTimeEncoder;
+import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
+import com.google.cloud.bigquery.storage.v1beta2.TableName;
+import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
+import com.google.protobuf.Descriptors;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.concurrent.ExecutionException;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.threeten.bp.LocalDateTime;
+import org.threeten.bp.LocalTime;
+
+public class ITBigQueryTimeEncoderTest {
+ private static final String DATASET = RemoteBigQueryHelper.generateDatasetName();
+ private static final String TABLE = "testtable";
+ private static final String DESCRIPTION = "BigQuery Write Java manual client test dataset";
+
+ private static BigQueryWriteClient client;
+ private static TableInfo tableInfo;
+ private static BigQuery bigquery;
+
+ @BeforeClass
+ public static void beforeClass() throws IOException {
+ client = BigQueryWriteClient.create();
+
+ RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
+ bigquery = bigqueryHelper.getOptions().getService();
+ DatasetInfo datasetInfo =
+ DatasetInfo.newBuilder(/* datasetId = */ DATASET).setDescription(DESCRIPTION).build();
+ bigquery.create(datasetInfo);
+ tableInfo =
+ TableInfo.newBuilder(
+ TableId.of(DATASET, TABLE),
+ StandardTableDefinition.of(
+ Schema.of(
+ com.google.cloud.bigquery.Field.newBuilder(
+ "test_str", StandardSQLTypeName.STRING)
+ .build(),
+ com.google.cloud.bigquery.Field.newBuilder(
+ "test_time_micros", StandardSQLTypeName.TIME)
+ .setMode(Mode.REPEATED)
+ .build(),
+ com.google.cloud.bigquery.Field.newBuilder(
+ "test_datetime_micros", StandardSQLTypeName.DATETIME)
+ .setMode(Mode.REPEATED)
+ .build())))
+ .build();
+ bigquery.create(tableInfo);
+ }
+
+ @AfterClass
+ public static void afterClass() {
+ if (client != null) {
+ client.close();
+ }
+ if (bigquery != null) {
+ RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
+ }
+ }
+
+ @Test
+ public void TestTimeEncoding()
+ throws IOException, InterruptedException, ExecutionException,
+ Descriptors.DescriptorValidationException {
+ TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, TABLE);
+ try (JsonStreamWriter jsonStreamWriter =
+ JsonStreamWriter.newBuilder(parent.toString(), tableInfo.getDefinition().getSchema())
+ .createDefaultStream()
+ .build()) {
+ JSONObject row = new JSONObject();
+ row.put("test_str", "Start of the day");
+ row.put(
+ "test_time_micros",
+ new JSONArray(
+ new long[] {
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(13, 14, 15, 16_000_000)),
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(23, 59, 59, 999_999_000)),
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(0, 0, 0, 0)),
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 2, 3, 4_000)),
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(5, 6, 7, 8_000))
+ }));
+ row.put(
+ "test_datetime_micros",
+ new JSONArray(
+ new long[] {
+ CivilTimeEncoder.encodePacked64DatetimeMicros(
+ LocalDateTime.of(1, 1, 1, 12, 0, 0, 0)),
+ CivilTimeEncoder.encodePacked64DatetimeMicros(
+ LocalDateTime.of(1995, 5, 19, 10, 30, 45, 0)),
+ CivilTimeEncoder.encodePacked64DatetimeMicros(
+ LocalDateTime.of(2000, 1, 1, 0, 0, 0, 0)),
+ CivilTimeEncoder.encodePacked64DatetimeMicros(
+ LocalDateTime.of(2026, 3, 11, 5, 45, 12, 9_000_000)),
+ CivilTimeEncoder.encodePacked64DatetimeMicros(
+ LocalDateTime.of(2050, 1, 2, 3, 4, 5, 6_000)),
+ }));
+ JSONArray jsonArr = new JSONArray(new JSONObject[] {row});
+ ApiFuture<AppendRowsResponse> response = jsonStreamWriter.append(jsonArr, -1);
+ Assert.assertFalse(response.get().getAppendResult().hasOffset());
+ TableResult result =
+ bigquery.listTableData(
+ tableInfo.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
+ Iterator<FieldValueList> iter = result.getValues().iterator();
+ FieldValueList currentRow;
+ currentRow = iter.next();
+ assertEquals("Start of the day", currentRow.get(0).getValue());
+ assertEquals("13:14:15.016000", currentRow.get(1).getRepeatedValue().get(0).getStringValue());
+ assertEquals("23:59:59.999999", currentRow.get(1).getRepeatedValue().get(1).getStringValue());
+ assertEquals("00:00:00", currentRow.get(1).getRepeatedValue().get(2).getStringValue());
+ assertEquals("01:02:03.000004", currentRow.get(1).getRepeatedValue().get(3).getStringValue());
+ assertEquals("05:06:07.000008", currentRow.get(1).getRepeatedValue().get(4).getStringValue());
+
+ assertEquals(
+ "0001-01-01T12:00:00", currentRow.get(2).getRepeatedValue().get(0).getStringValue());
+ assertEquals(
+ "1995-05-19T10:30:45", currentRow.get(2).getRepeatedValue().get(1).getStringValue());
+ assertEquals(
+ "2000-01-01T00:00:00", currentRow.get(2).getRepeatedValue().get(2).getStringValue());
+ assertEquals(
+ "2026-03-11T05:45:12.009000",
+ currentRow.get(2).getRepeatedValue().get(3).getStringValue());
+ assertEquals(
+ "2050-01-02T03:04:05.000006",
+ currentRow.get(2).getRepeatedValue().get(4).getStringValue());
+ }
+ }
+}
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
index 2e1fd95ed8..ddfbd08e4d 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
@@ -41,6 +41,7 @@
import org.junit.BeforeClass;
import org.junit.Test;
import org.threeten.bp.Duration;
+import org.threeten.bp.LocalDateTime;
/** Integration tests for BigQuery Write API. */
public class ITBigQueryWriteManualClientTest {
@@ -240,7 +241,9 @@ public void testJsonStreamWriterCommittedStream()
JSONObject row1 = new JSONObject();
row1.put("test_str", "aaa");
row1.put("test_numerics", new JSONArray(new String[] {"123.4", "-9000000"}));
- row1.put("test_datetime", "2020-10-1 12:00:00");
+ row1.put(
+ "test_datetime",
+ CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.of(2020, 10, 1, 12, 0)));
JSONArray jsonArr1 = new JSONArray(new JSONObject[] {row1});
ApiFuture<AppendRowsResponse> response1 = jsonStreamWriter.append(jsonArr1, -1);
@@ -313,7 +316,9 @@ public void testJsonStreamWriterWithDefaultStream()
JSONObject row1 = new JSONObject();
row1.put("test_str", "aaa");
row1.put("test_numerics", new JSONArray(new String[] {"123.4", "-9000000"}));
- row1.put("test_datetime", "2020-10-1 12:00:00");
+ row1.put(
+ "test_datetime",
+ CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.of(2020, 10, 1, 12, 0)));
JSONArray jsonArr1 = new JSONArray(new JSONObject[] {row1});
ApiFuture<AppendRowsResponse> response1 = jsonStreamWriter.append(jsonArr1, -1);
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/st/ITBigQueryStorageLongRunningWriteTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/st/ITBigQueryStorageLongRunningWriteTest.java
deleted file mode 100644
index 7e978e7f36..0000000000
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/st/ITBigQueryStorageLongRunningWriteTest.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Copyright 2021 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.google.cloud.bigquery.storage.v1beta2.st;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import com.google.api.core.ApiFuture;
-import com.google.cloud.ServiceOptions;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.DatasetInfo;
-import com.google.cloud.bigquery.Field;
-import com.google.cloud.bigquery.FieldValueList;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.StandardSQLTypeName;
-import com.google.cloud.bigquery.StandardTableDefinition;
-import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.TableInfo;
-import com.google.cloud.bigquery.TableResult;
-import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
-import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
-import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
-import com.google.cloud.bigquery.storage.v1beta2.TableName;
-import com.google.cloud.bigquery.storage.v1beta2.it.ITBigQueryStorageLongRunningTest;
-import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
-import com.google.protobuf.Descriptors;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.concurrent.ExecutionException;
-import java.util.logging.Logger;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.threeten.bp.LocalDateTime;
-
-public class ITBigQueryStorageLongRunningWriteTest {
- public enum RowComplexity {
- SIMPLE,
- COMPLEX
- }
-
- private static final Logger LOG =
- Logger.getLogger(ITBigQueryStorageLongRunningTest.class.getName());
- private static final String LONG_TESTS_ENABLED_PROPERTY =
- "bigquery.storage.enable_long_running_tests";
- private static final String DESCRIPTION = "BigQuery Write Java long test dataset";
-
- private static String dataset;
- private static BigQueryWriteClient client;
- private static String parentProjectId;
- private static BigQuery bigquery;
-
- private static JSONObject MakeJsonObject(RowComplexity complexity) throws IOException {
- JSONObject object = new JSONObject();
- // size: (1, simple)(2,complex)()
- // TODO(jstocklass): Add option for testing protobuf format using StreamWriter2
- switch (complexity) {
- case SIMPLE:
- object.put("test_str", "aaa");
- object.put("test_numerics", new JSONArray(new String[] {"1234", "-900000"}));
- object.put("test_datetime", String.valueOf(LocalDateTime.now()));
- break;
- case COMPLEX:
- // TODO(jstocklass): make a complex object
- default:
- break;
- }
- return object;
- }
-
- @BeforeClass
- public static void beforeClass() throws IOException {
- parentProjectId = String.format("projects/%s", ServiceOptions.getDefaultProjectId());
-
- client = BigQueryWriteClient.create();
- RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
- bigquery = bigqueryHelper.getOptions().getService();
- dataset = RemoteBigQueryHelper.generateDatasetName();
- DatasetInfo datasetInfo =
- DatasetInfo.newBuilder(/* datasetId = */ dataset).setDescription(DESCRIPTION).build();
- LOG.info("Creating dataset: " + dataset);
- bigquery.create(datasetInfo);
- }
-
- @AfterClass
- public static void afterClass() {
- if (client != null) {
- client.close();
- }
- if (bigquery != null && dataset != null) {
- RemoteBigQueryHelper.forceDelete(bigquery, dataset);
- LOG.info("Deleted test dataset: " + dataset);
- }
- }
-
- @Test
- public void testDefaultStreamSimpleSchema()
- throws IOException, InterruptedException, ExecutionException,
- Descriptors.DescriptorValidationException {
- // TODO(jstocklass): Set up a default stream. Write to it for a long time,
- // (a few minutes for now) and make sure that everything goes well, report stats.
- LOG.info(
- String.format(
- "%s tests running with parent project: %s",
- ITBigQueryStorageLongRunningWriteTest.class.getSimpleName(), parentProjectId));
-
- String tableName = "JsonSimpleTableDefaultStream";
- TableInfo tableInfo =
- TableInfo.newBuilder(
- TableId.of(dataset, tableName),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder(
- "test_str", StandardSQLTypeName.STRING)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_numerics", StandardSQLTypeName.NUMERIC)
- .setMode(Field.Mode.REPEATED)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_datetime", StandardSQLTypeName.DATETIME)
- .build())))
- .build();
- bigquery.create(tableInfo);
-
- int requestLimit = 10;
- long averageLatency = 0;
- long totalLatency = 0;
- TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), dataset, tableName);
- try (JsonStreamWriter jsonStreamWriter =
- JsonStreamWriter.newBuilder(parent.toString(), tableInfo.getDefinition().getSchema())
- .createDefaultStream()
- .build()) {
- for (int i = 0; i < requestLimit; i++) {
- JSONObject row = MakeJsonObject(RowComplexity.SIMPLE);
- JSONArray jsonArr = new JSONArray(new JSONObject[] {row});
- long startTime = System.nanoTime();
- // TODO(jstocklass): Make asynchronized calls instead of synchronized calls
- ApiFuture<AppendRowsResponse> response = jsonStreamWriter.append(jsonArr, -1);
- long finishTime = System.nanoTime();
- Assert.assertFalse(response.get().getAppendResult().hasOffset());
- // Ignore first entry, it is way slower than the others and ruins expected behavior
- if (i != 0) {
- totalLatency += (finishTime - startTime);
- }
- }
- averageLatency = totalLatency / requestLimit;
- // TODO(jstocklass): Is there a better way to get this than to log it?
- LOG.info("Simple average Latency: " + String.valueOf(averageLatency) + " ns");
- averageLatency = totalLatency = 0;
-
- TableResult result =
- bigquery.listTableData(
- tableInfo.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator<FieldValueList> iter = result.getValues().iterator();
- FieldValueList currentRow;
- for (int i = 0; i < requestLimit; i++) {
- assertTrue(iter.hasNext());
- currentRow = iter.next();
- assertEquals("aaa", currentRow.get(0).getStringValue());
- }
- assertEquals(false, iter.hasNext());
- }
- }
-}
diff --git a/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto b/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto
index 909cdd8592..2a7643252a 100644
--- a/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto
+++ b/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto
@@ -14,7 +14,7 @@ message ComplexRoot {
optional string test_numeric = 9;
optional string test_geo = 10;
optional int64 test_timestamp = 11;
- optional string test_time = 12;
+ optional int64 test_time = 12;
}
message CasingComplex {
diff --git a/pom.xml b/pom.xml
index 3e8e2f35b4..ecd8710d69 100644
--- a/pom.xml
+++ b/pom.xml
@@ -96,7 +96,7 @@
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-bigquery</artifactId>
- <version>1.127.10</version>
+ <version>1.127.9</version>
<groupId>com.google.api.grpc</groupId>