diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4bb706c868..7a591e5091 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,25 @@
# Changelog
+### [2.3.3](https://www.github.com/googleapis/java-bigquerystorage/compare/v2.3.2...v2.3.3) (2021-10-05)
+
+
+### Bug Fixes
+
+* add string to DATETIME, TIME, NUMERIC, BIGNUMERIC support in JsonStreamWriter v1 ([#1345](https://www.github.com/googleapis/java-bigquerystorage/issues/1345)) ([9d272dd](https://www.github.com/googleapis/java-bigquerystorage/commit/9d272dd711dfe8909a7b421dabdcdef30f1dc689))
+* JsonWriter accepts string input for DATETIME, TIME, NUMERIC, BIGNUMERIC field ([#1339](https://www.github.com/googleapis/java-bigquerystorage/issues/1339)) ([691f078](https://www.github.com/googleapis/java-bigquerystorage/commit/691f078f09e32fa7784d4afeeee0e8071f78d6fd))
+* Sample should show sending multiple rows in one request ([#1335](https://www.github.com/googleapis/java-bigquerystorage/issues/1335)) ([3f85a68](https://www.github.com/googleapis/java-bigquerystorage/commit/3f85a68d6812aac94ca8a266d76be2aa94cd0b32))
+
+
+### Documentation
+
+* **samples:** Add WriteAPI BUFFERED mode sample ([#1338](https://www.github.com/googleapis/java-bigquerystorage/issues/1338)) ([5dfd523](https://www.github.com/googleapis/java-bigquerystorage/commit/5dfd5231c1ffa68a1b92ba01a9608d8cee2c0596))
+
+
+### Dependencies
+
+* update dependency com.google.cloud:google-cloud-bigquery to v2.2.0 ([#1346](https://www.github.com/googleapis/java-bigquerystorage/issues/1346)) ([9370eb8](https://www.github.com/googleapis/java-bigquerystorage/commit/9370eb8ea74547792f9597d20707546836c417b6))
+* update dependency com.google.cloud:google-cloud-bigquery to v2.2.1 ([#1347](https://www.github.com/googleapis/java-bigquerystorage/issues/1347)) ([406f5d3](https://www.github.com/googleapis/java-bigquerystorage/commit/406f5d3d446d3a9639b2c95590b3c6c38118d741))
+
### [2.3.2](https://www.github.com/googleapis/java-bigquerystorage/compare/v2.3.1...v2.3.2) (2021-10-01)
diff --git a/README.md b/README.md
index 4b52bd4207..d6df72db17 100644
--- a/README.md
+++ b/README.md
@@ -41,7 +41,7 @@ If you are using Maven without BOM, add this to your dependencies:
com.google.cloud
google-cloud-bigquerystorage
- 2.3.1
+ 2.3.2
```
@@ -56,13 +56,13 @@ implementation 'com.google.cloud:google-cloud-bigquerystorage'
If you are using Gradle without BOM, add this to your dependencies
```Groovy
-implementation 'com.google.cloud:google-cloud-bigquerystorage:2.3.1'
+implementation 'com.google.cloud:google-cloud-bigquerystorage:2.3.2'
```
If you are using SBT, add this to your dependencies
```Scala
-libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "2.3.1"
+libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "2.3.2"
```
## Authentication
@@ -112,6 +112,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/java-bigquerystora
| Parallel Write Committed Stream | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/ParallelWriteCommittedStream.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/ParallelWriteCommittedStream.java) |
| Storage Arrow Sample | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/StorageArrowSample.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/StorageArrowSample.java) |
| Storage Sample | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/StorageSample.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/StorageSample.java) |
+| Write Buffered Stream | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java) |
| Write Committed Stream | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java) |
| Write Pending Stream | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java) |
| Write To Default Stream | [source code](https://github.com/googleapis/java-bigquerystorage/blob/main/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquerystorage&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java) |
diff --git a/google-cloud-bigquerystorage-bom/pom.xml b/google-cloud-bigquerystorage-bom/pom.xml
index fba3128640..c8ab885c13 100644
--- a/google-cloud-bigquerystorage-bom/pom.xml
+++ b/google-cloud-bigquerystorage-bom/pom.xml
@@ -3,7 +3,7 @@
4.0.0
com.google.cloud
google-cloud-bigquerystorage-bom
- 2.3.2
+ 2.3.3
pom
com.google.cloud
@@ -52,37 +52,37 @@
com.google.cloud
google-cloud-bigquerystorage
- 2.3.2
+ 2.3.3
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.127.2
+ 0.127.3
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.127.2
+ 0.127.3
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 2.3.2
+ 2.3.3
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.127.2
+ 0.127.3
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.127.2
+ 0.127.3
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 2.3.2
+ 2.3.3
diff --git a/google-cloud-bigquerystorage/pom.xml b/google-cloud-bigquerystorage/pom.xml
index 64d918dd4b..1a7de66fb8 100644
--- a/google-cloud-bigquerystorage/pom.xml
+++ b/google-cloud-bigquerystorage/pom.xml
@@ -3,7 +3,7 @@
4.0.0
com.google.cloud
google-cloud-bigquerystorage
- 2.3.2
+ 2.3.3
jar
BigQuery Storage
https://github.com/googleapis/java-bigquerystorage
@@ -11,7 +11,7 @@
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
google-cloud-bigquerystorage
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java
index 4a13b4dfcd..6b20e89bcb 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java
@@ -75,6 +75,7 @@ private JsonStreamWriter(Builder builder)
builder.traceId);
this.streamWriter = streamWriterBuilder.build();
this.streamName = builder.streamName;
+ this.tableSchema = builder.tableSchema;
}
/**
@@ -105,7 +106,8 @@ public ApiFuture append(JSONArray jsonArr, long offset) {
// of JSON data.
for (int i = 0; i < jsonArr.length(); i++) {
JSONObject json = jsonArr.getJSONObject(i);
- Message protoMessage = JsonToProtoMessage.convertJsonToProtoMessage(this.descriptor, json);
+ Message protoMessage =
+ JsonToProtoMessage.convertJsonToProtoMessage(this.descriptor, this.tableSchema, json);
rowsBuilder.addSerializedRows(protoMessage.toByteString());
}
// Need to make sure refreshAppendAndSetDescriptor finish first before this can run
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessage.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessage.java
index d2cdbab982..df6d26ac8e 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessage.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessage.java
@@ -15,6 +15,7 @@
*/
package com.google.cloud.bigquery.storage.v1;
+import com.google.api.pathtemplate.ValidationException;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.protobuf.ByteString;
@@ -23,10 +24,14 @@
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.Message;
import com.google.protobuf.UninitializedMessageException;
+import java.math.BigDecimal;
+import java.util.List;
import java.util.logging.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
+import org.threeten.bp.LocalDateTime;
+import org.threeten.bp.LocalTime;
/**
* Converts Json data to protocol buffer messages given the protocol buffer descriptor. The protobuf
@@ -58,7 +63,28 @@ public static DynamicMessage convertJsonToProtoMessage(Descriptor protoSchema, J
Preconditions.checkNotNull(protoSchema, "Protobuf descriptor is null.");
Preconditions.checkState(json.length() != 0, "JSONObject is empty.");
- return convertJsonToProtoMessageImpl(protoSchema, json, "root", /*topLevel=*/ true);
+ return convertJsonToProtoMessageImpl(protoSchema, null, json, "root", /*topLevel=*/ true);
+ }
+
+ /**
+ * Converts Json data to protocol buffer messages given the protocol buffer descriptor.
+ *
+ * @param protoSchema
+ * @param tableSchema bigquery table schema is needed for type conversion of DATETIME, TIME,
+ * NUMERIC, BIGNUMERIC
+ * @param json
+ * @throws IllegalArgumentException when JSON data is not compatible with proto descriptor.
+ */
+ public static DynamicMessage convertJsonToProtoMessage(
+ Descriptor protoSchema, TableSchema tableSchema, JSONObject json)
+ throws IllegalArgumentException {
+ Preconditions.checkNotNull(json, "JSONObject is null.");
+ Preconditions.checkNotNull(protoSchema, "Protobuf descriptor is null.");
+ Preconditions.checkNotNull(tableSchema, "TableSchema is null.");
+ Preconditions.checkState(json.length() != 0, "JSONObject is empty.");
+
+ return convertJsonToProtoMessageImpl(
+ protoSchema, tableSchema.getFieldsList(), json, "root", /*topLevel=*/ true);
}
/**
@@ -71,7 +97,11 @@ public static DynamicMessage convertJsonToProtoMessage(Descriptor protoSchema, J
* @throws IllegalArgumentException when JSON data is not compatible with proto descriptor.
*/
private static DynamicMessage convertJsonToProtoMessageImpl(
- Descriptor protoSchema, JSONObject json, String jsonScope, boolean topLevel)
+ Descriptor protoSchema,
+ List tableSchema,
+ JSONObject json,
+ String jsonScope,
+ boolean topLevel)
throws IllegalArgumentException {
DynamicMessage.Builder protoMsg = DynamicMessage.newBuilder(protoSchema);
@@ -90,10 +120,25 @@ private static DynamicMessage convertJsonToProtoMessageImpl(
throw new IllegalArgumentException(
String.format("JSONObject has fields unknown to BigQuery: %s.", currentScope));
}
+ TableFieldSchema fieldSchema = null;
+ if (tableSchema != null) {
+ // protoSchema is generated from tableSchema so their field ordering should match.
+ fieldSchema = tableSchema.get(field.getIndex());
+ if (!fieldSchema.getName().equals(field.getName())) {
+ throw new ValidationException(
+ "Field at index "
+ + field.getIndex()
+ + " has mismatch names ("
+ + fieldSchema.getName()
+ + ") ("
+ + field.getName()
+ + ")");
+ }
+ }
if (!field.isRepeated()) {
- fillField(protoMsg, field, json, jsonName, currentScope);
+ fillField(protoMsg, field, fieldSchema, json, jsonName, currentScope);
} else {
- fillRepeatedField(protoMsg, field, json, jsonName, currentScope);
+ fillRepeatedField(protoMsg, field, fieldSchema, json, jsonName, currentScope);
}
}
@@ -119,6 +164,7 @@ private static DynamicMessage convertJsonToProtoMessageImpl(
*
* @param protoMsg The protocol buffer message being constructed
* @param fieldDescriptor
+ * @param fieldSchema
* @param json
* @param exactJsonKeyName Exact key name in JSONObject instead of lowercased version
* @param currentScope Debugging purposes
@@ -127,6 +173,7 @@ private static DynamicMessage convertJsonToProtoMessageImpl(
private static void fillField(
DynamicMessage.Builder protoMsg,
FieldDescriptor fieldDescriptor,
+ TableFieldSchema fieldSchema,
JSONObject json,
String exactJsonKeyName,
String currentScope)
@@ -144,6 +191,25 @@ private static void fillField(
}
break;
case BYTES:
+ if (fieldSchema != null) {
+ if (fieldSchema.getType() == TableFieldSchema.Type.NUMERIC) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ return;
+ }
+ } else if (fieldSchema.getType() == TableFieldSchema.Type.BIGNUMERIC) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ return;
+ }
+ }
+ }
if (val instanceof ByteString) {
protoMsg.setField(fieldDescriptor, ((ByteString) val).toByteArray());
return;
@@ -170,6 +236,29 @@ private static void fillField(
}
break;
case INT64:
+ if (fieldSchema != null) {
+ if (fieldSchema.getType() == TableFieldSchema.Type.DATETIME) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.parse((String) val)));
+ return;
+ } else if (val instanceof Long) {
+ protoMsg.setField(fieldDescriptor, (Long) val);
+ return;
+ }
+ } else if (fieldSchema.getType() == TableFieldSchema.Type.TIME) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.parse((String) val)));
+ return;
+ } else if (val instanceof Long) {
+ protoMsg.setField(fieldDescriptor, (Long) val);
+ return;
+ }
+ }
+ }
if (val instanceof Integer) {
protoMsg.setField(fieldDescriptor, new Long((Integer) val));
return;
@@ -206,6 +295,7 @@ private static void fillField(
fieldDescriptor,
convertJsonToProtoMessageImpl(
fieldDescriptor.getMessageType(),
+ fieldSchema == null ? null : fieldSchema.getFieldsList(),
json.getJSONObject(exactJsonKeyName),
currentScope,
/*topLevel =*/ false));
@@ -224,6 +314,7 @@ private static void fillField(
*
* @param protoMsg The protocol buffer message being constructed
* @param fieldDescriptor
+ * @param fieldSchema
* @param json If root level has no matching fields, throws exception.
* @param exactJsonKeyName Exact key name in JSONObject instead of lowercased version
* @param currentScope Debugging purposes
@@ -232,6 +323,7 @@ private static void fillField(
private static void fillRepeatedField(
DynamicMessage.Builder protoMsg,
FieldDescriptor fieldDescriptor,
+ TableFieldSchema fieldSchema,
JSONObject json,
String exactJsonKeyName,
String currentScope)
@@ -259,40 +351,81 @@ private static void fillRepeatedField(
}
break;
case BYTES:
- if (val instanceof JSONArray) {
- try {
- byte[] bytes = new byte[((JSONArray) val).length()];
- for (int j = 0; j < ((JSONArray) val).length(); j++) {
- bytes[j] = (byte) ((JSONArray) val).getInt(j);
- if (bytes[j] != ((JSONArray) val).getInt(j)) {
- throw new IllegalArgumentException(
- String.format(
- "Error: "
- + currentScope
- + "["
- + index
- + "] could not be converted to byte[]."));
+ Boolean added = false;
+ if (fieldSchema != null && fieldSchema.getType() == TableFieldSchema.Type.NUMERIC) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ added = true;
+ }
+ } else if (fieldSchema != null
+ && fieldSchema.getType() == TableFieldSchema.Type.BIGNUMERIC) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ added = true;
+ }
+ }
+ if (!added) {
+ if (val instanceof JSONArray) {
+ try {
+ byte[] bytes = new byte[((JSONArray) val).length()];
+ for (int j = 0; j < ((JSONArray) val).length(); j++) {
+ bytes[j] = (byte) ((JSONArray) val).getInt(j);
+ if (bytes[j] != ((JSONArray) val).getInt(j)) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Error: "
+ + currentScope
+ + "["
+ + index
+ + "] could not be converted to byte[]."));
+ }
}
+ protoMsg.addRepeatedField(fieldDescriptor, bytes);
+ } catch (JSONException e) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Error: "
+ + currentScope
+ + "["
+ + index
+ + "] could not be converted to byte[]."));
}
- protoMsg.addRepeatedField(fieldDescriptor, bytes);
- } catch (JSONException e) {
- throw new IllegalArgumentException(
- String.format(
- "Error: "
- + currentScope
- + "["
- + index
- + "] could not be converted to byte[]."));
+ } else if (val instanceof ByteString) {
+ protoMsg.addRepeatedField(fieldDescriptor, ((ByteString) val).toByteArray());
+ return;
+ } else {
+ fail = true;
}
- } else if (val instanceof ByteString) {
- protoMsg.addRepeatedField(fieldDescriptor, ((ByteString) val).toByteArray());
- return;
- } else {
- fail = true;
}
break;
case INT64:
- if (val instanceof Integer) {
+ if (fieldSchema != null && fieldSchema.getType() == TableFieldSchema.Type.DATETIME) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.parse((String) val)));
+ } else if (val instanceof Long) {
+ protoMsg.addRepeatedField(fieldDescriptor, (Long) val);
+ } else {
+ fail = true;
+ }
+ } else if (fieldSchema != null && fieldSchema.getType() == TableFieldSchema.Type.TIME) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.parse((String) val)));
+ } else if (val instanceof Long) {
+ protoMsg.addRepeatedField(fieldDescriptor, (Long) val);
+ } else {
+ fail = true;
+ }
+ } else if (val instanceof Integer) {
protoMsg.addRepeatedField(fieldDescriptor, new Long((Integer) val));
} else if (val instanceof Long) {
protoMsg.addRepeatedField(fieldDescriptor, (Long) val);
@@ -330,6 +463,7 @@ private static void fillRepeatedField(
fieldDescriptor,
convertJsonToProtoMessageImpl(
fieldDescriptor.getMessageType(),
+ fieldSchema == null ? null : fieldSchema.getFieldsList(),
jsonArray.getJSONObject(i),
currentScope,
/*topLevel =*/ false));
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java
index 64687f8732..5837e5c4a5 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.java
@@ -75,6 +75,7 @@ private JsonStreamWriter(Builder builder)
builder.traceId);
this.streamWriter = streamWriterBuilder.build();
this.streamName = builder.streamName;
+ this.tableSchema = builder.tableSchema;
}
/**
@@ -105,7 +106,8 @@ public ApiFuture append(JSONArray jsonArr, long offset) {
// of JSON data.
for (int i = 0; i < jsonArr.length(); i++) {
JSONObject json = jsonArr.getJSONObject(i);
- Message protoMessage = JsonToProtoMessage.convertJsonToProtoMessage(this.descriptor, json);
+ Message protoMessage =
+ JsonToProtoMessage.convertJsonToProtoMessage(this.descriptor, this.tableSchema, json);
rowsBuilder.addSerializedRows(protoMessage.toByteString());
}
// Need to make sure refreshAppendAndSetDescriptor finish first before this can run
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessage.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessage.java
index 5077d7e7d2..5913306b08 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessage.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessage.java
@@ -15,6 +15,7 @@
*/
package com.google.cloud.bigquery.storage.v1beta2;
+import com.google.api.pathtemplate.ValidationException;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.protobuf.ByteString;
@@ -23,10 +24,14 @@
import com.google.protobuf.DynamicMessage;
import com.google.protobuf.Message;
import com.google.protobuf.UninitializedMessageException;
+import java.math.BigDecimal;
+import java.util.List;
import java.util.logging.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
+import org.threeten.bp.LocalDateTime;
+import org.threeten.bp.LocalTime;
/**
* Converts Json data to protocol buffer messages given the protocol buffer descriptor. The protobuf
@@ -58,7 +63,28 @@ public static DynamicMessage convertJsonToProtoMessage(Descriptor protoSchema, J
Preconditions.checkNotNull(protoSchema, "Protobuf descriptor is null.");
Preconditions.checkState(json.length() != 0, "JSONObject is empty.");
- return convertJsonToProtoMessageImpl(protoSchema, json, "root", /*topLevel=*/ true);
+ return convertJsonToProtoMessageImpl(protoSchema, null, json, "root", /*topLevel=*/ true);
+ }
+
+ /**
+ * Converts Json data to protocol buffer messages given the protocol buffer descriptor.
+ *
+ * @param protoSchema
+ * @param tableSchema bigquery table schema is needed for type conversion of DATETIME, TIME,
+ * NUMERIC, BIGNUMERIC
+ * @param json
+ * @throws IllegalArgumentException when JSON data is not compatible with proto descriptor.
+ */
+ public static DynamicMessage convertJsonToProtoMessage(
+ Descriptor protoSchema, TableSchema tableSchema, JSONObject json)
+ throws IllegalArgumentException {
+ Preconditions.checkNotNull(json, "JSONObject is null.");
+ Preconditions.checkNotNull(protoSchema, "Protobuf descriptor is null.");
+ Preconditions.checkNotNull(tableSchema, "TableSchema is null.");
+ Preconditions.checkState(json.length() != 0, "JSONObject is empty.");
+
+ return convertJsonToProtoMessageImpl(
+ protoSchema, tableSchema.getFieldsList(), json, "root", /*topLevel=*/ true);
}
/**
@@ -71,9 +97,12 @@ public static DynamicMessage convertJsonToProtoMessage(Descriptor protoSchema, J
* @throws IllegalArgumentException when JSON data is not compatible with proto descriptor.
*/
private static DynamicMessage convertJsonToProtoMessageImpl(
- Descriptor protoSchema, JSONObject json, String jsonScope, boolean topLevel)
+ Descriptor protoSchema,
+ List tableSchema,
+ JSONObject json,
+ String jsonScope,
+ boolean topLevel)
throws IllegalArgumentException {
-
DynamicMessage.Builder protoMsg = DynamicMessage.newBuilder(protoSchema);
String[] jsonNames = JSONObject.getNames(json);
if (jsonNames == null) {
@@ -90,10 +119,25 @@ private static DynamicMessage convertJsonToProtoMessageImpl(
throw new IllegalArgumentException(
String.format("JSONObject has fields unknown to BigQuery: %s.", currentScope));
}
+ TableFieldSchema fieldSchema = null;
+ if (tableSchema != null) {
+ // protoSchema is generated from tableSchema so their field ordering should match.
+ fieldSchema = tableSchema.get(field.getIndex());
+ if (!fieldSchema.getName().equals(field.getName())) {
+ throw new ValidationException(
+ "Field at index "
+ + field.getIndex()
+ + " has mismatch names ("
+ + fieldSchema.getName()
+ + ") ("
+ + field.getName()
+ + ")");
+ }
+ }
if (!field.isRepeated()) {
- fillField(protoMsg, field, json, jsonName, currentScope);
+ fillField(protoMsg, field, fieldSchema, json, jsonName, currentScope);
} else {
- fillRepeatedField(protoMsg, field, json, jsonName, currentScope);
+ fillRepeatedField(protoMsg, field, fieldSchema, json, jsonName, currentScope);
}
}
@@ -127,6 +171,7 @@ private static DynamicMessage convertJsonToProtoMessageImpl(
private static void fillField(
DynamicMessage.Builder protoMsg,
FieldDescriptor fieldDescriptor,
+ TableFieldSchema fieldSchema,
JSONObject json,
String exactJsonKeyName,
String currentScope)
@@ -144,6 +189,25 @@ private static void fillField(
}
break;
case BYTES:
+ if (fieldSchema != null) {
+ if (fieldSchema.getType() == TableFieldSchema.Type.NUMERIC) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ return;
+ }
+ } else if (fieldSchema.getType() == TableFieldSchema.Type.BIGNUMERIC) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ return;
+ }
+ }
+ }
if (val instanceof ByteString) {
protoMsg.setField(fieldDescriptor, ((ByteString) val).toByteArray());
return;
@@ -170,6 +234,29 @@ private static void fillField(
}
break;
case INT64:
+ if (fieldSchema != null) {
+ if (fieldSchema.getType() == TableFieldSchema.Type.DATETIME) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.parse((String) val)));
+ return;
+ } else if (val instanceof Long) {
+ protoMsg.setField(fieldDescriptor, (Long) val);
+ return;
+ }
+ } else if (fieldSchema.getType() == TableFieldSchema.Type.TIME) {
+ if (val instanceof String) {
+ protoMsg.setField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.parse((String) val)));
+ return;
+ } else if (val instanceof Long) {
+ protoMsg.setField(fieldDescriptor, (Long) val);
+ return;
+ }
+ }
+ }
if (val instanceof Integer) {
protoMsg.setField(fieldDescriptor, new Long((Integer) val));
return;
@@ -206,6 +293,7 @@ private static void fillField(
fieldDescriptor,
convertJsonToProtoMessageImpl(
fieldDescriptor.getMessageType(),
+ fieldSchema == null ? null : fieldSchema.getFieldsList(),
json.getJSONObject(exactJsonKeyName),
currentScope,
/*topLevel =*/ false));
@@ -232,6 +320,7 @@ private static void fillField(
private static void fillRepeatedField(
DynamicMessage.Builder protoMsg,
FieldDescriptor fieldDescriptor,
+ TableFieldSchema fieldSchema,
JSONObject json,
String exactJsonKeyName,
String currentScope)
@@ -259,40 +348,81 @@ private static void fillRepeatedField(
}
break;
case BYTES:
- if (val instanceof JSONArray) {
- try {
- byte[] bytes = new byte[((JSONArray) val).length()];
- for (int j = 0; j < ((JSONArray) val).length(); j++) {
- bytes[j] = (byte) ((JSONArray) val).getInt(j);
- if (bytes[j] != ((JSONArray) val).getInt(j)) {
- throw new IllegalArgumentException(
- String.format(
- "Error: "
- + currentScope
- + "["
- + index
- + "] could not be converted to byte[]."));
+ Boolean added = false;
+ if (fieldSchema != null && fieldSchema.getType() == TableFieldSchema.Type.NUMERIC) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ added = true;
+ }
+ } else if (fieldSchema != null
+ && fieldSchema.getType() == TableFieldSchema.Type.BIGNUMERIC) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal((String) val)));
+ added = true;
+ }
+ }
+ if (!added) {
+ if (val instanceof JSONArray) {
+ try {
+ byte[] bytes = new byte[((JSONArray) val).length()];
+ for (int j = 0; j < ((JSONArray) val).length(); j++) {
+ bytes[j] = (byte) ((JSONArray) val).getInt(j);
+ if (bytes[j] != ((JSONArray) val).getInt(j)) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Error: "
+ + currentScope
+ + "["
+ + index
+ + "] could not be converted to byte[]."));
+ }
}
+ protoMsg.addRepeatedField(fieldDescriptor, bytes);
+ } catch (JSONException e) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Error: "
+ + currentScope
+ + "["
+ + index
+ + "] could not be converted to byte[]."));
}
- protoMsg.addRepeatedField(fieldDescriptor, bytes);
- } catch (JSONException e) {
- throw new IllegalArgumentException(
- String.format(
- "Error: "
- + currentScope
- + "["
- + index
- + "] could not be converted to byte[]."));
+ } else if (val instanceof ByteString) {
+ protoMsg.addRepeatedField(fieldDescriptor, ((ByteString) val).toByteArray());
+ return;
+ } else {
+ fail = true;
}
- } else if (val instanceof ByteString) {
- protoMsg.addRepeatedField(fieldDescriptor, ((ByteString) val).toByteArray());
- return;
- } else {
- fail = true;
}
break;
case INT64:
- if (val instanceof Integer) {
+ if (fieldSchema != null && fieldSchema.getType() == TableFieldSchema.Type.DATETIME) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.parse((String) val)));
+ } else if (val instanceof Long) {
+ protoMsg.addRepeatedField(fieldDescriptor, (Long) val);
+ } else {
+ fail = true;
+ }
+ } else if (fieldSchema != null && fieldSchema.getType() == TableFieldSchema.Type.TIME) {
+ if (val instanceof String) {
+ protoMsg.addRepeatedField(
+ fieldDescriptor,
+ CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.parse((String) val)));
+ } else if (val instanceof Long) {
+ protoMsg.addRepeatedField(fieldDescriptor, (Long) val);
+ } else {
+ fail = true;
+ }
+ } else if (val instanceof Integer) {
protoMsg.addRepeatedField(fieldDescriptor, new Long((Integer) val));
} else if (val instanceof Long) {
protoMsg.addRepeatedField(fieldDescriptor, (Long) val);
@@ -330,6 +460,7 @@ private static void fillRepeatedField(
fieldDescriptor,
convertJsonToProtoMessageImpl(
fieldDescriptor.getMessageType(),
+ fieldSchema == null ? null : fieldSchema.getFieldsList(),
jsonArray.getJSONObject(i),
currentScope,
/*topLevel =*/ false));
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BQTableSchemaToProtoDescriptorTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BQTableSchemaToProtoDescriptorTest.java
index 07cb1c8657..3ef5f87d02 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BQTableSchemaToProtoDescriptorTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BQTableSchemaToProtoDescriptorTest.java
@@ -164,6 +164,18 @@ public void testStructComplex() throws Exception {
.setMode(TableFieldSchema.Mode.REQUIRED)
.setName("test_date")
.build();
+ final TableFieldSchema test_datetime =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_datetime")
+ .build();
+ final TableFieldSchema test_datetime_str =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_datetime_str")
+ .build();
final TableFieldSchema ComplexLvl2 =
TableFieldSchema.newBuilder()
.setType(TableFieldSchema.Type.STRUCT)
@@ -203,12 +215,36 @@ public void testStructComplex() throws Exception {
.setMode(TableFieldSchema.Mode.NULLABLE)
.setName("test_time")
.build();
+ final TableFieldSchema TEST_TIME_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_time_str")
+ .build();
final TableFieldSchema TEST_NUMERIC_REPEATED =
TableFieldSchema.newBuilder()
.setType(TableFieldSchema.Type.NUMERIC)
.setMode(TableFieldSchema.Mode.REPEATED)
.setName("test_numeric_repeated")
.build();
+ final TableFieldSchema TEST_NUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_numeric_str")
+ .build();
+ final TableFieldSchema TEST_BIGNUMERIC =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_bignumeric")
+ .build();
+ final TableFieldSchema TEST_BIGNUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_bignumeric_str")
+ .build();
final TableSchema tableSchema =
TableSchema.newBuilder()
.addFields(0, test_int)
@@ -217,13 +253,19 @@ public void testStructComplex() throws Exception {
.addFields(3, test_bool)
.addFields(4, test_double)
.addFields(5, test_date)
- .addFields(6, ComplexLvl1)
- .addFields(7, ComplexLvl2)
- .addFields(8, TEST_NUMERIC)
- .addFields(9, TEST_GEO)
- .addFields(10, TEST_TIMESTAMP)
- .addFields(11, TEST_TIME)
- .addFields(12, TEST_NUMERIC_REPEATED)
+ .addFields(6, test_datetime)
+ .addFields(7, test_datetime_str)
+ .addFields(8, ComplexLvl1)
+ .addFields(9, ComplexLvl2)
+ .addFields(10, TEST_NUMERIC)
+ .addFields(11, TEST_GEO)
+ .addFields(12, TEST_TIMESTAMP)
+ .addFields(13, TEST_TIME)
+ .addFields(14, TEST_TIME_STR)
+ .addFields(15, TEST_NUMERIC_REPEATED)
+ .addFields(16, TEST_NUMERIC_STR)
+ .addFields(17, TEST_BIGNUMERIC)
+ .addFields(18, TEST_BIGNUMERIC_STR)
.build();
final Descriptor descriptor =
BQTableSchemaToProtoDescriptor.convertBQTableSchemaToProtoDescriptor(tableSchema);
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriterTest.java
index ea9e7e6f4c..cebd8cc7c2 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriterTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriterTest.java
@@ -24,14 +24,12 @@
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
-import com.google.cloud.bigquery.storage.test.JsonTest.ComplexRoot;
+import com.google.cloud.bigquery.storage.test.JsonTest;
import com.google.cloud.bigquery.storage.test.Test.FooType;
-import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import com.google.protobuf.Int64Value;
import com.google.protobuf.Timestamp;
import java.io.IOException;
-import java.math.BigDecimal;
import java.util.Arrays;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
@@ -96,91 +94,6 @@ public class JsonStreamWriterTest {
.setMode(TableFieldSchema.Mode.REPEATED)
.setName("test_string")
.build();
- private final TableFieldSchema TEST_BYTES =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.BYTES)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .setName("test_bytes")
- .build();
- private final TableFieldSchema TEST_BOOL =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.BOOL)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_bool")
- .build();
- private final TableFieldSchema TEST_DOUBLE =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.DOUBLE)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_double")
- .build();
- private final TableFieldSchema TEST_DATE =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.DATE)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .setName("test_date")
- .build();
- private final TableFieldSchema COMPLEXLVL2 =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.STRUCT)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .addFields(0, TEST_INT)
- .setName("complex_lvl2")
- .build();
- private final TableFieldSchema COMPLEXLVL1 =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.STRUCT)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .addFields(0, TEST_INT)
- .addFields(1, COMPLEXLVL2)
- .setName("complex_lvl1")
- .build();
- private final TableFieldSchema TEST_NUMERIC =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_numeric")
- .build();
- private final TableFieldSchema TEST_NUMERIC_REPEATED =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_numeric_repeated")
- .build();
- private final TableFieldSchema TEST_GEO =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.GEOGRAPHY)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_geo")
- .build();
- private final TableFieldSchema TEST_TIMESTAMP =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.TIMESTAMP)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_timestamp")
- .build();
- private final TableFieldSchema TEST_TIME =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.TIME)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_time")
- .build();
- private final TableSchema COMPLEX_TABLE_SCHEMA =
- TableSchema.newBuilder()
- .addFields(0, TEST_INT)
- .addFields(1, TEST_STRING)
- .addFields(2, TEST_BYTES)
- .addFields(3, TEST_BOOL)
- .addFields(4, TEST_DOUBLE)
- .addFields(5, TEST_DATE)
- .addFields(6, COMPLEXLVL1)
- .addFields(7, COMPLEXLVL2)
- .addFields(8, TEST_NUMERIC)
- .addFields(9, TEST_GEO)
- .addFields(10, TEST_TIMESTAMP)
- .addFields(11, TEST_TIME)
- .addFields(12, TEST_NUMERIC_REPEATED)
- .build();
@Before
public void setUp() throws Exception {
@@ -284,6 +197,56 @@ public void testSingleAppendSimpleJson() throws Exception {
}
}
+ @Test
+ public void testSpecialTypeAppend() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("time")
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+
+ JsonTest.TestTime expectedProto =
+ JsonTest.TestTime.newBuilder()
+ .addTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
+ .build();
+ JSONObject foo = new JSONObject();
+ foo.put("time", new JSONArray(new String[] {"01:00:01"}));
+ JSONArray jsonArr = new JSONArray();
+ jsonArr.put(foo);
+
+ try (JsonStreamWriter writer =
+ getTestJsonStreamWriterBuilder(TEST_STREAM, tableSchema).build()) {
+
+ testBigQueryWrite.addResponse(
+ AppendRowsResponse.newBuilder()
+ .setAppendResult(
+ AppendRowsResponse.AppendResult.newBuilder().setOffset(Int64Value.of(0)).build())
+ .build());
+
+ ApiFuture appendFuture = writer.append(jsonArr);
+ assertEquals(0L, appendFuture.get().getAppendResult().getOffset().getValue());
+ appendFuture.get();
+ assertEquals(
+ 1,
+ testBigQueryWrite
+ .getAppendRequests()
+ .get(0)
+ .getProtoRows()
+ .getRows()
+ .getSerializedRowsCount());
+ assertEquals(
+ testBigQueryWrite
+ .getAppendRequests()
+ .get(0)
+ .getProtoRows()
+ .getRows()
+ .getSerializedRows(0),
+ expectedProto.toByteString());
+ }
+ }
+
@Test
public void testSingleAppendMultipleSimpleJson() throws Exception {
FooType expectedProto = FooType.newBuilder().setFoo("allen").build();
@@ -391,116 +354,6 @@ public void testMultipleAppendSimpleJson() throws Exception {
}
}
- @Test
- public void testSingleAppendComplexJson() throws Exception {
- ComplexRoot expectedProto =
- ComplexRoot.newBuilder()
- .setTestInt(1)
- .addTestString("a")
- .addTestString("b")
- .addTestString("c")
- .setTestBytes(ByteString.copyFrom("hello".getBytes()))
- .setTestBool(true)
- .addTestDouble(1.1)
- .addTestDouble(2.2)
- .addTestDouble(3.3)
- .addTestDouble(4.4)
- .setTestDate(1)
- .setComplexLvl1(
- com.google.cloud.bigquery.storage.test.JsonTest.ComplexLvl1.newBuilder()
- .setTestInt(2)
- .setComplexLvl2(
- com.google.cloud.bigquery.storage.test.JsonTest.ComplexLvl2.newBuilder()
- .setTestInt(3)
- .build())
- .build())
- .setComplexLvl2(
- com.google.cloud.bigquery.storage.test.JsonTest.ComplexLvl2.newBuilder()
- .setTestInt(3)
- .build())
- .setTestNumeric(
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")))
- .setTestGeo("POINT(1,1)")
- .setTestTimestamp(12345678)
- .setTestTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
- .addTestNumericRepeated(
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0")))
- .addTestNumericRepeated(
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("99999999999999999999999999999.999999999")))
- .addTestNumericRepeated(
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("-99999999999999999999999999999.999999999")))
- .build();
- JSONObject complex_lvl2 = new JSONObject();
- complex_lvl2.put("test_int", 3);
-
- JSONObject complex_lvl1 = new JSONObject();
- complex_lvl1.put("test_int", 2);
- complex_lvl1.put("complex_lvl2", complex_lvl2);
-
- JSONObject json = new JSONObject();
- json.put("test_int", 1);
- json.put("test_string", new JSONArray(new String[] {"a", "b", "c"}));
- json.put("test_bytes", ByteString.copyFrom("hello".getBytes()));
- json.put("test_bool", true);
- json.put("test_DOUBLe", new JSONArray(new Double[] {1.1, 2.2, 3.3, 4.4}));
- json.put("test_date", 1);
- json.put("complex_lvl1", complex_lvl1);
- json.put("complex_lvl2", complex_lvl2);
- json.put(
- "test_numeric",
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")));
- json.put(
- "test_numeric_repeated",
- new JSONArray(
- new byte[][] {
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("99999999999999999999999999999.999999999"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("-99999999999999999999999999999.999999999"))
- .toByteArray(),
- }));
- json.put("test_geo", "POINT(1,1)");
- json.put("test_timestamp", 12345678);
- json.put("test_time", CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)));
- JSONArray jsonArr = new JSONArray();
- jsonArr.put(json);
-
- try (JsonStreamWriter writer =
- getTestJsonStreamWriterBuilder(TEST_STREAM, COMPLEX_TABLE_SCHEMA).build()) {
- testBigQueryWrite.addResponse(
- AppendRowsResponse.newBuilder()
- .setAppendResult(
- AppendRowsResponse.AppendResult.newBuilder().setOffset(Int64Value.of(0)).build())
- .build());
-
- ApiFuture appendFuture = writer.append(jsonArr);
-
- assertEquals(0L, appendFuture.get().getAppendResult().getOffset().getValue());
- appendFuture.get();
- assertEquals(
- 1,
- testBigQueryWrite
- .getAppendRequests()
- .get(0)
- .getProtoRows()
- .getRows()
- .getSerializedRowsCount());
- assertEquals(
- testBigQueryWrite
- .getAppendRequests()
- .get(0)
- .getProtoRows()
- .getRows()
- .getSerializedRows(0),
- expectedProto.toByteString());
- }
- }
-
@Test
public void testAppendOutOfRangeException() throws Exception {
try (JsonStreamWriter writer =
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessageTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessageTest.java
index b8eba3c893..db5b14f73d 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessageTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/JsonToProtoMessageTest.java
@@ -35,6 +35,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
+import org.threeten.bp.LocalTime;
@RunWith(JUnit4.class)
public class JsonToProtoMessageTest {
@@ -276,6 +277,145 @@ public class JsonToProtoMessageTest {
new JSONObject().put("test_int", 3)
}))
};
+ private final TableFieldSchema TEST_INT =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.INT64)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_int")
+ .build();
+ private final TableFieldSchema TEST_STRING =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.STRING)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_string")
+ .build();
+ private final TableFieldSchema TEST_BYTES =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.BYTES)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .setName("test_bytes")
+ .build();
+ private final TableFieldSchema TEST_BOOL =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.BOOL)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_bool")
+ .build();
+ private final TableFieldSchema TEST_DOUBLE =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DOUBLE)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_double")
+ .build();
+ private final TableFieldSchema TEST_DATE =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATE)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .setName("test_date")
+ .build();
+ private final TableFieldSchema TEST_DATETIME =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_datetime")
+ .build();
+ private final TableFieldSchema TEST_DATETIME_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_datetime_str")
+ .build();
+ private final TableFieldSchema COMPLEXLVL2 =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.STRUCT)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .addFields(0, TEST_INT)
+ .setName("complex_lvl2")
+ .build();
+ private final TableFieldSchema COMPLEXLVL1 =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.STRUCT)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .addFields(0, TEST_INT)
+ .addFields(1, COMPLEXLVL2)
+ .setName("complex_lvl1")
+ .build();
+ private final TableFieldSchema TEST_NUMERIC =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_numeric")
+ .build();
+ private final TableFieldSchema TEST_NUMERIC_REPEATED =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_numeric_repeated")
+ .build();
+ private final TableFieldSchema TEST_GEO =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.GEOGRAPHY)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_geo")
+ .build();
+ private final TableFieldSchema TEST_TIMESTAMP =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIMESTAMP)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_timestamp")
+ .build();
+ private final TableFieldSchema TEST_TIME =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_time")
+ .build();
+ private final TableFieldSchema TEST_TIME_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_time_str")
+ .build();
+ private final TableFieldSchema TEST_NUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_numeric_str")
+ .build();
+ private final TableFieldSchema TEST_BIGNUMERIC =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_bignumeric")
+ .build();
+ private final TableFieldSchema TEST_BIGNUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_bignumeric_str")
+ .build();
+ private final TableSchema COMPLEX_TABLE_SCHEMA =
+ TableSchema.newBuilder()
+ .addFields(0, TEST_INT)
+ .addFields(1, TEST_STRING)
+ .addFields(2, TEST_BYTES)
+ .addFields(3, TEST_BOOL)
+ .addFields(4, TEST_DOUBLE)
+ .addFields(5, TEST_DATE)
+ .addFields(6, TEST_DATETIME)
+ .addFields(7, TEST_DATETIME_STR)
+ .addFields(8, COMPLEXLVL1)
+ .addFields(9, COMPLEXLVL2)
+ .addFields(10, TEST_NUMERIC)
+ .addFields(11, TEST_GEO)
+ .addFields(12, TEST_TIMESTAMP)
+ .addFields(13, TEST_TIME)
+ .addFields(14, TEST_TIME_STR)
+ .addFields(15, TEST_NUMERIC_REPEATED)
+ .addFields(16, TEST_NUMERIC_STR)
+ .addFields(17, TEST_BIGNUMERIC)
+ .addFields(18, TEST_BIGNUMERIC_STR)
+ .build();
@Test
public void testDifferentNameCasing() throws Exception {
@@ -333,6 +473,89 @@ public void testInt32NotMatchInt64() throws Exception {
}
}
+ @Test
+ public void testDateTimeMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("datetime")
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("datetime", 1.0);
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ TestDatetime.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a int64 field at root.datetime.", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testTimeMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("time")
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("time", new JSONArray(new Double[] {1.0}));
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(TestTime.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a int64 field at root.time[0].", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testNumericMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("numeric")
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("numeric", 1.0);
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ TestNumeric.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a bytes field at root.numeric.", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testBigNumericMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("bignumeric")
+ .setType(TableFieldSchema.Type.BIGNUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("bignumeric", new JSONArray(new Double[] {1.0}));
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ TestBignumeric.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a bytes field at root.bignumeric[0].", e.getMessage());
+ }
+ }
+
@Test
public void testDouble() throws Exception {
TestDouble expectedProto = TestDouble.newBuilder().setDouble(1.2).setFloat(3.4f).build();
@@ -491,12 +714,35 @@ public void testStructComplex() throws Exception {
.addTestDouble(3.3)
.addTestDouble(4.4)
.setTestDate(1)
+ .setTestDatetime(1)
+ .addTestDatetimeStr(142258614586538368L)
+ .addTestDatetimeStr(142258525253402624L)
.setComplexLvl1(
ComplexLvl1.newBuilder()
.setTestInt(2)
.setComplexLvl2(ComplexLvl2.newBuilder().setTestInt(3).build())
.build())
.setComplexLvl2(ComplexLvl2.newBuilder().setTestInt(3).build())
+ .setTestNumeric(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")))
+ .setTestGeo("POINT(1,1)")
+ .setTestTimestamp(12345678)
+ .setTestTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
+ .setTestTimeStr(89332507144L)
+ .addTestNumericRepeated(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0")))
+ .addTestNumericRepeated(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("99999999999999999999999999999.999999999")))
+ .addTestNumericRepeated(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("-99999999999999999999999999999.999999999")))
+ .setTestNumericStr(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("12.4")))
+ .setTestBignumeric(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("2.3")))
+ .addTestBignumericStr(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23")))
.build();
JSONObject complex_lvl2 = new JSONObject();
complex_lvl2.put("test_int", 3);
@@ -512,11 +758,40 @@ public void testStructComplex() throws Exception {
json.put("test_bool", true);
json.put("test_DOUBLe", new JSONArray(new Double[] {1.1, 2.2, 3.3, 4.4}));
json.put("test_date", 1);
+ json.put("test_datetime", 1);
+ json.put(
+ "test_datetime_str",
+ new JSONArray(new String[] {"2021-09-27T20:51:10.752", "2021-09-27T00:00:00"}));
json.put("complex_lvl1", complex_lvl1);
json.put("complex_lvl2", complex_lvl2);
-
+ json.put(
+ "test_numeric",
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")));
+ json.put(
+ "test_numeric_repeated",
+ new JSONArray(
+ new byte[][] {
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0"))
+ .toByteArray(),
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("99999999999999999999999999999.999999999"))
+ .toByteArray(),
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("-99999999999999999999999999999.999999999"))
+ .toByteArray(),
+ }));
+ json.put("test_geo", "POINT(1,1)");
+ json.put("test_timestamp", 12345678);
+ json.put("test_time", CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)));
+ json.put("test_time_str", "20:51:10.1234");
+ json.put("test_numeric_str", "12.4");
+ json.put(
+ "test_bignumeric",
+ BigDecimalByteStringEncoder.encodeToNumericByteString(BigDecimal.valueOf(2.3)));
+ json.put("test_bignumeric_str", new JSONArray(new String[] {"1.23"}));
DynamicMessage protoMsg =
- JsonToProtoMessage.convertJsonToProtoMessage(ComplexRoot.getDescriptor(), json);
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ ComplexRoot.getDescriptor(), COMPLEX_TABLE_SCHEMA, json);
assertEquals(expectedProto, protoMsg);
}
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java
index 29b065f44b..c6e0973d58 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BQTableSchemaToProtoDescriptorTest.java
@@ -164,6 +164,18 @@ public void testStructComplex() throws Exception {
.setMode(TableFieldSchema.Mode.REQUIRED)
.setName("test_date")
.build();
+ final TableFieldSchema test_datetime =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_datetime")
+ .build();
+ final TableFieldSchema test_datetime_str =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_datetime_str")
+ .build();
final TableFieldSchema ComplexLvl2 =
TableFieldSchema.newBuilder()
.setType(TableFieldSchema.Type.STRUCT)
@@ -203,12 +215,36 @@ public void testStructComplex() throws Exception {
.setMode(TableFieldSchema.Mode.NULLABLE)
.setName("test_time")
.build();
+ final TableFieldSchema TEST_TIME_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_time_str")
+ .build();
final TableFieldSchema TEST_NUMERIC_REPEATED =
TableFieldSchema.newBuilder()
.setType(TableFieldSchema.Type.NUMERIC)
.setMode(TableFieldSchema.Mode.REPEATED)
.setName("test_numeric_repeated")
.build();
+ final TableFieldSchema TEST_NUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_numeric_str")
+ .build();
+ final TableFieldSchema TEST_BIGNUMERIC =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_bignumeric")
+ .build();
+ final TableFieldSchema TEST_BIGNUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_bignumeric_str")
+ .build();
final TableSchema tableSchema =
TableSchema.newBuilder()
.addFields(0, test_int)
@@ -217,13 +253,19 @@ public void testStructComplex() throws Exception {
.addFields(3, test_bool)
.addFields(4, test_double)
.addFields(5, test_date)
- .addFields(6, ComplexLvl1)
- .addFields(7, ComplexLvl2)
- .addFields(8, TEST_NUMERIC)
- .addFields(9, TEST_GEO)
- .addFields(10, TEST_TIMESTAMP)
- .addFields(11, TEST_TIME)
- .addFields(12, TEST_NUMERIC_REPEATED)
+ .addFields(6, test_datetime)
+ .addFields(7, test_datetime_str)
+ .addFields(8, ComplexLvl1)
+ .addFields(9, ComplexLvl2)
+ .addFields(10, TEST_NUMERIC)
+ .addFields(11, TEST_GEO)
+ .addFields(12, TEST_TIMESTAMP)
+ .addFields(13, TEST_TIME)
+ .addFields(14, TEST_TIME_STR)
+ .addFields(15, TEST_NUMERIC_REPEATED)
+ .addFields(16, TEST_NUMERIC_STR)
+ .addFields(17, TEST_BIGNUMERIC)
+ .addFields(18, TEST_BIGNUMERIC_STR)
.build();
final Descriptor descriptor =
BQTableSchemaToProtoDescriptor.convertBQTableSchemaToProtoDescriptor(tableSchema);
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java
index dbc7460163..85c83dbf25 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriterTest.java
@@ -24,14 +24,12 @@
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
-import com.google.cloud.bigquery.storage.test.JsonTest.ComplexRoot;
+import com.google.cloud.bigquery.storage.test.JsonTest;
import com.google.cloud.bigquery.storage.test.Test.FooType;
-import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import com.google.protobuf.Int64Value;
import com.google.protobuf.Timestamp;
import java.io.IOException;
-import java.math.BigDecimal;
import java.util.Arrays;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
@@ -96,91 +94,6 @@ public class JsonStreamWriterTest {
.setMode(TableFieldSchema.Mode.REPEATED)
.setName("test_string")
.build();
- private final TableFieldSchema TEST_BYTES =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.BYTES)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .setName("test_bytes")
- .build();
- private final TableFieldSchema TEST_BOOL =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.BOOL)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_bool")
- .build();
- private final TableFieldSchema TEST_DOUBLE =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.DOUBLE)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_double")
- .build();
- private final TableFieldSchema TEST_DATE =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.DATE)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .setName("test_date")
- .build();
- private final TableFieldSchema COMPLEXLVL2 =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.STRUCT)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .addFields(0, TEST_INT)
- .setName("complex_lvl2")
- .build();
- private final TableFieldSchema COMPLEXLVL1 =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.STRUCT)
- .setMode(TableFieldSchema.Mode.REQUIRED)
- .addFields(0, TEST_INT)
- .addFields(1, COMPLEXLVL2)
- .setName("complex_lvl1")
- .build();
- private final TableFieldSchema TEST_NUMERIC =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_numeric")
- .build();
- private final TableFieldSchema TEST_NUMERIC_REPEATED =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_numeric_repeated")
- .build();
- private final TableFieldSchema TEST_GEO =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.GEOGRAPHY)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_geo")
- .build();
- private final TableFieldSchema TEST_TIMESTAMP =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.TIMESTAMP)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_timestamp")
- .build();
- private final TableFieldSchema TEST_TIME =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.TIME)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_time")
- .build();
- private final TableSchema COMPLEX_TABLE_SCHEMA =
- TableSchema.newBuilder()
- .addFields(0, TEST_INT)
- .addFields(1, TEST_STRING)
- .addFields(2, TEST_BYTES)
- .addFields(3, TEST_BOOL)
- .addFields(4, TEST_DOUBLE)
- .addFields(5, TEST_DATE)
- .addFields(6, COMPLEXLVL1)
- .addFields(7, COMPLEXLVL2)
- .addFields(8, TEST_NUMERIC)
- .addFields(9, TEST_GEO)
- .addFields(10, TEST_TIMESTAMP)
- .addFields(11, TEST_TIME)
- .addFields(12, TEST_NUMERIC_REPEATED)
- .build();
@Before
public void setUp() throws Exception {
@@ -284,6 +197,56 @@ public void testSingleAppendSimpleJson() throws Exception {
}
}
+ @Test
+ public void testSpecialTypeAppend() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("time")
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+
+ JsonTest.TestTime expectedProto =
+ JsonTest.TestTime.newBuilder()
+ .addTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
+ .build();
+ JSONObject foo = new JSONObject();
+ foo.put("time", new JSONArray(new String[] {"01:00:01"}));
+ JSONArray jsonArr = new JSONArray();
+ jsonArr.put(foo);
+
+ try (JsonStreamWriter writer =
+ getTestJsonStreamWriterBuilder(TEST_STREAM, tableSchema).build()) {
+
+ testBigQueryWrite.addResponse(
+ AppendRowsResponse.newBuilder()
+ .setAppendResult(
+ AppendRowsResponse.AppendResult.newBuilder().setOffset(Int64Value.of(0)).build())
+ .build());
+
+ ApiFuture appendFuture = writer.append(jsonArr);
+ assertEquals(0L, appendFuture.get().getAppendResult().getOffset().getValue());
+ appendFuture.get();
+ assertEquals(
+ 1,
+ testBigQueryWrite
+ .getAppendRequests()
+ .get(0)
+ .getProtoRows()
+ .getRows()
+ .getSerializedRowsCount());
+ assertEquals(
+ testBigQueryWrite
+ .getAppendRequests()
+ .get(0)
+ .getProtoRows()
+ .getRows()
+ .getSerializedRows(0),
+ expectedProto.toByteString());
+ }
+ }
+
@Test
public void testSingleAppendMultipleSimpleJson() throws Exception {
FooType expectedProto = FooType.newBuilder().setFoo("allen").build();
@@ -391,116 +354,6 @@ public void testMultipleAppendSimpleJson() throws Exception {
}
}
- @Test
- public void testSingleAppendComplexJson() throws Exception {
- ComplexRoot expectedProto =
- ComplexRoot.newBuilder()
- .setTestInt(1)
- .addTestString("a")
- .addTestString("b")
- .addTestString("c")
- .setTestBytes(ByteString.copyFrom("hello".getBytes()))
- .setTestBool(true)
- .addTestDouble(1.1)
- .addTestDouble(2.2)
- .addTestDouble(3.3)
- .addTestDouble(4.4)
- .setTestDate(1)
- .setComplexLvl1(
- com.google.cloud.bigquery.storage.test.JsonTest.ComplexLvl1.newBuilder()
- .setTestInt(2)
- .setComplexLvl2(
- com.google.cloud.bigquery.storage.test.JsonTest.ComplexLvl2.newBuilder()
- .setTestInt(3)
- .build())
- .build())
- .setComplexLvl2(
- com.google.cloud.bigquery.storage.test.JsonTest.ComplexLvl2.newBuilder()
- .setTestInt(3)
- .build())
- .setTestNumeric(
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")))
- .setTestGeo("POINT(1,1)")
- .setTestTimestamp(12345678)
- .setTestTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
- .addTestNumericRepeated(
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0")))
- .addTestNumericRepeated(
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("99999999999999999999999999999.999999999")))
- .addTestNumericRepeated(
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("-99999999999999999999999999999.999999999")))
- .build();
- JSONObject complex_lvl2 = new JSONObject();
- complex_lvl2.put("test_int", 3);
-
- JSONObject complex_lvl1 = new JSONObject();
- complex_lvl1.put("test_int", 2);
- complex_lvl1.put("complex_lvl2", complex_lvl2);
-
- JSONObject json = new JSONObject();
- json.put("test_int", 1);
- json.put("test_string", new JSONArray(new String[] {"a", "b", "c"}));
- json.put("test_bytes", ByteString.copyFrom("hello".getBytes()));
- json.put("test_bool", true);
- json.put("test_DOUBLe", new JSONArray(new Double[] {1.1, 2.2, 3.3, 4.4}));
- json.put("test_date", 1);
- json.put("complex_lvl1", complex_lvl1);
- json.put("complex_lvl2", complex_lvl2);
- json.put(
- "test_numeric",
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")));
- json.put(
- "test_numeric_repeated",
- new JSONArray(
- new byte[][] {
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("99999999999999999999999999999.999999999"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("-99999999999999999999999999999.999999999"))
- .toByteArray(),
- }));
- json.put("test_geo", "POINT(1,1)");
- json.put("test_timestamp", 12345678);
- json.put("test_time", CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)));
- JSONArray jsonArr = new JSONArray();
- jsonArr.put(json);
-
- try (JsonStreamWriter writer =
- getTestJsonStreamWriterBuilder(TEST_STREAM, COMPLEX_TABLE_SCHEMA).build()) {
- testBigQueryWrite.addResponse(
- AppendRowsResponse.newBuilder()
- .setAppendResult(
- AppendRowsResponse.AppendResult.newBuilder().setOffset(Int64Value.of(0)).build())
- .build());
-
- ApiFuture appendFuture = writer.append(jsonArr);
-
- assertEquals(0L, appendFuture.get().getAppendResult().getOffset().getValue());
- appendFuture.get();
- assertEquals(
- 1,
- testBigQueryWrite
- .getAppendRequests()
- .get(0)
- .getProtoRows()
- .getRows()
- .getSerializedRowsCount());
- assertEquals(
- testBigQueryWrite
- .getAppendRequests()
- .get(0)
- .getProtoRows()
- .getRows()
- .getSerializedRows(0),
- expectedProto.toByteString());
- }
- }
-
@Test
public void testAppendOutOfRangeException() throws Exception {
try (JsonStreamWriter writer =
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessageTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessageTest.java
index 74097a661b..89848b0e61 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessageTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/JsonToProtoMessageTest.java
@@ -35,6 +35,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
+import org.threeten.bp.LocalTime;
@RunWith(JUnit4.class)
public class JsonToProtoMessageTest {
@@ -277,6 +278,146 @@ public class JsonToProtoMessageTest {
}))
};
+ private final TableFieldSchema TEST_INT =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.INT64)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_int")
+ .build();
+ private final TableFieldSchema TEST_STRING =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.STRING)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_string")
+ .build();
+ private final TableFieldSchema TEST_BYTES =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.BYTES)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .setName("test_bytes")
+ .build();
+ private final TableFieldSchema TEST_BOOL =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.BOOL)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_bool")
+ .build();
+ private final TableFieldSchema TEST_DOUBLE =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DOUBLE)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_double")
+ .build();
+ private final TableFieldSchema TEST_DATE =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATE)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .setName("test_date")
+ .build();
+ private final TableFieldSchema TEST_DATETIME =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_datetime")
+ .build();
+ private final TableFieldSchema TEST_DATETIME_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_datetime_str")
+ .build();
+ private final TableFieldSchema COMPLEXLVL2 =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.STRUCT)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .addFields(0, TEST_INT)
+ .setName("complex_lvl2")
+ .build();
+ private final TableFieldSchema COMPLEXLVL1 =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.STRUCT)
+ .setMode(TableFieldSchema.Mode.REQUIRED)
+ .addFields(0, TEST_INT)
+ .addFields(1, COMPLEXLVL2)
+ .setName("complex_lvl1")
+ .build();
+ private final TableFieldSchema TEST_NUMERIC =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_numeric")
+ .build();
+ private final TableFieldSchema TEST_NUMERIC_REPEATED =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_numeric_repeated")
+ .build();
+ private final TableFieldSchema TEST_GEO =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.GEOGRAPHY)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_geo")
+ .build();
+ private final TableFieldSchema TEST_TIMESTAMP =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIMESTAMP)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_timestamp")
+ .build();
+ private final TableFieldSchema TEST_TIME =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_time")
+ .build();
+ private final TableFieldSchema TEST_TIME_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_time_str")
+ .build();
+ private final TableFieldSchema TEST_NUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_numeric_str")
+ .build();
+ private final TableFieldSchema TEST_BIGNUMERIC =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .setName("test_bignumeric")
+ .build();
+ private final TableFieldSchema TEST_BIGNUMERIC_STR =
+ TableFieldSchema.newBuilder()
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .setName("test_bignumeric_str")
+ .build();
+ private final TableSchema COMPLEX_TABLE_SCHEMA =
+ TableSchema.newBuilder()
+ .addFields(0, TEST_INT)
+ .addFields(1, TEST_STRING)
+ .addFields(2, TEST_BYTES)
+ .addFields(3, TEST_BOOL)
+ .addFields(4, TEST_DOUBLE)
+ .addFields(5, TEST_DATE)
+ .addFields(6, TEST_DATETIME)
+ .addFields(7, TEST_DATETIME_STR)
+ .addFields(8, COMPLEXLVL1)
+ .addFields(9, COMPLEXLVL2)
+ .addFields(10, TEST_NUMERIC)
+ .addFields(11, TEST_GEO)
+ .addFields(12, TEST_TIMESTAMP)
+ .addFields(13, TEST_TIME)
+ .addFields(14, TEST_TIME_STR)
+ .addFields(15, TEST_NUMERIC_REPEATED)
+ .addFields(16, TEST_NUMERIC_STR)
+ .addFields(17, TEST_BIGNUMERIC)
+ .addFields(18, TEST_BIGNUMERIC_STR)
+ .build();
+
@Test
public void testDifferentNameCasing() throws Exception {
TestInt64 expectedProto =
@@ -333,6 +474,89 @@ public void testInt32NotMatchInt64() throws Exception {
}
}
+ @Test
+ public void testDateTimeMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("datetime")
+ .setType(TableFieldSchema.Type.DATETIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("datetime", 1.0);
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ TestDatetime.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a int64 field at root.datetime.", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testTimeMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("time")
+ .setType(TableFieldSchema.Type.TIME)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("time", new JSONArray(new Double[] {1.0}));
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(TestTime.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a int64 field at root.time[0].", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testNumericMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("numeric")
+ .setType(TableFieldSchema.Type.NUMERIC)
+ .setMode(TableFieldSchema.Mode.NULLABLE)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("numeric", 1.0);
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ TestNumeric.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a bytes field at root.numeric.", e.getMessage());
+ }
+ }
+
+ @Test
+ public void testBigNumericMismatch() throws Exception {
+ TableFieldSchema field =
+ TableFieldSchema.newBuilder()
+ .setName("bignumeric")
+ .setType(TableFieldSchema.Type.BIGNUMERIC)
+ .setMode(TableFieldSchema.Mode.REPEATED)
+ .build();
+ TableSchema tableSchema = TableSchema.newBuilder().addFields(field).build();
+ JSONObject json = new JSONObject();
+ json.put("bignumeric", new JSONArray(new Double[] {1.0}));
+ try {
+ DynamicMessage protoMsg =
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ TestBignumeric.getDescriptor(), tableSchema, json);
+ Assert.fail("should fail");
+ } catch (IllegalArgumentException e) {
+ assertEquals("JSONObject does not have a bytes field at root.bignumeric[0].", e.getMessage());
+ }
+ }
+
@Test
public void testDouble() throws Exception {
TestDouble expectedProto = TestDouble.newBuilder().setDouble(1.2).setFloat(3.4f).build();
@@ -491,12 +715,35 @@ public void testStructComplex() throws Exception {
.addTestDouble(3.3)
.addTestDouble(4.4)
.setTestDate(1)
+ .setTestDatetime(1)
+ .addTestDatetimeStr(142258614586538368L)
+ .addTestDatetimeStr(142258525253402624L)
.setComplexLvl1(
ComplexLvl1.newBuilder()
.setTestInt(2)
.setComplexLvl2(ComplexLvl2.newBuilder().setTestInt(3).build())
.build())
.setComplexLvl2(ComplexLvl2.newBuilder().setTestInt(3).build())
+ .setTestNumeric(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")))
+ .setTestGeo("POINT(1,1)")
+ .setTestTimestamp(12345678)
+ .setTestTime(CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)))
+ .setTestTimeStr(89332507144L)
+ .addTestNumericRepeated(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0")))
+ .addTestNumericRepeated(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("99999999999999999999999999999.999999999")))
+ .addTestNumericRepeated(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("-99999999999999999999999999999.999999999")))
+ .setTestNumericStr(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("12.4")))
+ .setTestBignumeric(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("2.3")))
+ .addTestBignumericStr(
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23")))
.build();
JSONObject complex_lvl2 = new JSONObject();
complex_lvl2.put("test_int", 3);
@@ -512,11 +759,40 @@ public void testStructComplex() throws Exception {
json.put("test_bool", true);
json.put("test_DOUBLe", new JSONArray(new Double[] {1.1, 2.2, 3.3, 4.4}));
json.put("test_date", 1);
+ json.put("test_datetime", 1);
+ json.put(
+ "test_datetime_str",
+ new JSONArray(new String[] {"2021-09-27T20:51:10.752", "2021-09-27T00:00:00"}));
json.put("complex_lvl1", complex_lvl1);
json.put("complex_lvl2", complex_lvl2);
-
+ json.put(
+ "test_numeric",
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.23456")));
+ json.put(
+ "test_numeric_repeated",
+ new JSONArray(
+ new byte[][] {
+ BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0"))
+ .toByteArray(),
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("99999999999999999999999999999.999999999"))
+ .toByteArray(),
+ BigDecimalByteStringEncoder.encodeToNumericByteString(
+ new BigDecimal("-99999999999999999999999999999.999999999"))
+ .toByteArray(),
+ }));
+ json.put("test_geo", "POINT(1,1)");
+ json.put("test_timestamp", 12345678);
+ json.put("test_time", CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 0, 1)));
+ json.put("test_time_str", "20:51:10.1234");
+ json.put("test_numeric_str", "12.4");
+ json.put(
+ "test_bignumeric",
+ BigDecimalByteStringEncoder.encodeToNumericByteString(BigDecimal.valueOf(2.3)));
+ json.put("test_bignumeric_str", new JSONArray(new String[] {"1.23"}));
DynamicMessage protoMsg =
- JsonToProtoMessage.convertJsonToProtoMessage(ComplexRoot.getDescriptor(), json);
+ JsonToProtoMessage.convertJsonToProtoMessage(
+ ComplexRoot.getDescriptor(), COMPLEX_TABLE_SCHEMA, json);
assertEquals(expectedProto, protoMsg);
}
diff --git a/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto b/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto
index ecffb2e0a8..b929604004 100644
--- a/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto
+++ b/google-cloud-bigquerystorage/src/test/proto/jsonTest.proto
@@ -9,13 +9,20 @@ message ComplexRoot {
optional bool test_bool = 4;
repeated double test_double = 5;
required int32 test_date = 6;
- required ComplexLvl1 complex_lvl1 = 7;
- required ComplexLvl2 complex_lvl2 = 8;
- optional bytes test_numeric = 9;
- optional string test_geo = 10;
- optional int64 test_timestamp = 11;
- optional int64 test_time = 12;
- repeated bytes test_numeric_repeated = 13;
+ optional int64 test_datetime = 7;
+ repeated int64 test_datetime_str = 8;
+ required ComplexLvl1 complex_lvl1 = 9;
+ required ComplexLvl2 complex_lvl2 = 10;
+ optional bytes test_numeric = 11;
+ optional string test_geo = 12;
+ optional int64 test_timestamp = 13;
+ optional int64 test_time = 14;
+ optional int64 test_time_str = 15;
+ repeated bytes test_numeric_repeated = 16;
+ optional bytes test_numeric_str = 17;
+ optional bytes test_bignumeric = 18;
+ repeated bytes test_bignumeric_str = 19;
}
message CasingComplex {
@@ -131,3 +138,19 @@ message TestRepeatedIsOptional {
message TopLevelMismatch {
optional double mismatch_double = 1;
}
+
+message TestDatetime {
+ optional int64 datetime = 1;
+}
+
+message TestTime {
+ repeated int64 time = 1;
+}
+
+message TestNumeric {
+ optional bytes numeric = 1;
+}
+
+message TestBignumeric {
+ repeated bytes bignumeric = 1;
+}
diff --git a/grpc-google-cloud-bigquerystorage-v1/pom.xml b/grpc-google-cloud-bigquerystorage-v1/pom.xml
index cbf728ca92..3a6f5c673c 100644
--- a/grpc-google-cloud-bigquerystorage-v1/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 2.3.2
+ 2.3.3
grpc-google-cloud-bigquerystorage-v1
GRPC library for grpc-google-cloud-bigquerystorage-v1
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
diff --git a/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml b/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
index 25d63ba616..b9005919fa 100644
--- a/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.127.2
+ 0.127.3
grpc-google-cloud-bigquerystorage-v1beta1
GRPC library for grpc-google-cloud-bigquerystorage-v1beta1
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
diff --git a/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml b/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
index 4db0cf9188..b488f9ca41 100644
--- a/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.127.2
+ 0.127.3
grpc-google-cloud-bigquerystorage-v1beta2
GRPC library for grpc-google-cloud-bigquerystorage-v1beta2
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
diff --git a/pom.xml b/pom.xml
index 903c4a34e0..75aa0f64e8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.google.cloud
google-cloud-bigquerystorage-parent
pom
- 2.3.2
+ 2.3.3
BigQuery Storage Parent
https://github.com/googleapis/java-bigquerystorage
@@ -83,37 +83,37 @@
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.127.2
+ 0.127.3
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.127.2
+ 0.127.3
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 2.3.2
+ 2.3.3
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.127.2
+ 0.127.3
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.127.2
+ 0.127.3
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 2.3.2
+ 2.3.3
com.google.cloud
google-cloud-bigquerystorage
- 2.3.2
+ 2.3.3
org.json
@@ -132,7 +132,7 @@
com.google.cloud
google-cloud-bigquery
- 2.1.13
+ 2.2.1
test
diff --git a/proto-google-cloud-bigquerystorage-v1/pom.xml b/proto-google-cloud-bigquerystorage-v1/pom.xml
index 6c6d534900..4b75ce11ac 100644
--- a/proto-google-cloud-bigquerystorage-v1/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 2.3.2
+ 2.3.3
proto-google-cloud-bigquerystorage-v1
PROTO library for proto-google-cloud-bigquerystorage-v1
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/pom.xml b/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
index a74833166d..63a287f4e2 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.127.2
+ 0.127.3
proto-google-cloud-bigquerystorage-v1beta1
PROTO library for proto-google-cloud-bigquerystorage-v1beta1
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/pom.xml b/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
index b7f7a4dc7c..59d0ec0e36 100644
--- a/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.127.2
+ 0.127.3
proto-google-cloud-bigquerystorage-v1beta2
PROTO library for proto-google-cloud-bigquerystorage-v1beta2
com.google.cloud
google-cloud-bigquerystorage-parent
- 2.3.2
+ 2.3.3
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index 0d6eb1f92b..500d1bc3d5 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -30,14 +30,14 @@
com.google.cloud
google-cloud-bigquerystorage
- 2.3.1
+ 2.3.2
com.google.cloud
google-cloud-bigquery
- 2.1.13
+ 2.2.1
org.apache.avro
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index e6552ac3b7..f51dd99c32 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -29,14 +29,14 @@
com.google.cloud
google-cloud-bigquerystorage
- 2.3.2
+ 2.3.3
com.google.cloud
google-cloud-bigquery
- 2.1.13
+ 2.2.1
org.apache.avro
diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index dcb441b6d4..14e6d1fa2c 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -48,7 +48,7 @@
com.google.cloud
google-cloud-bigquery
- 2.1.13
+ 2.2.1
org.apache.avro
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
new file mode 100644
index 0000000000..18a426621d
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerystorage;
+
+// [START bigquerystorage_jsonstreamwriter_buffered]
+import com.google.api.core.ApiFuture;
+import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
+import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
+import com.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
+import com.google.cloud.bigquery.storage.v1beta2.FlushRowsRequest;
+import com.google.cloud.bigquery.storage.v1beta2.FlushRowsResponse;
+import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
+import com.google.cloud.bigquery.storage.v1beta2.TableName;
+import com.google.cloud.bigquery.storage.v1beta2.WriteStream;
+import com.google.protobuf.Descriptors.DescriptorValidationException;
+import com.google.protobuf.Int64Value;
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+public class WriteBufferedStream {
+
+ public static void runWriteBufferedStream()
+ throws DescriptorValidationException, InterruptedException, IOException {
+ // TODO(developer): Replace these variables before running the sample.
+ String projectId = "MY_PROJECT_ID";
+ String datasetName = "MY_DATASET_NAME";
+ String tableName = "MY_TABLE_NAME";
+
+ writeBufferedStream(projectId, datasetName, tableName);
+ }
+
+ public static void writeBufferedStream(String projectId, String datasetName, String tableName)
+ throws DescriptorValidationException, InterruptedException, IOException {
+ try (BigQueryWriteClient client = BigQueryWriteClient.create()) {
+ // Initialize a write stream for the specified table.
+ // For more information on WriteStream.Type, see:
+ // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/WriteStream.Type.html
+ WriteStream stream = WriteStream.newBuilder().setType(WriteStream.Type.BUFFERED).build();
+ TableName parentTable = TableName.of(projectId, datasetName, tableName);
+ CreateWriteStreamRequest createWriteStreamRequest =
+ CreateWriteStreamRequest.newBuilder()
+ .setParent(parentTable.toString())
+ .setWriteStream(stream)
+ .build();
+ WriteStream writeStream = client.createWriteStream(createWriteStreamRequest);
+
+ // Use the JSON stream writer to send records in JSON format.
+ // For more information about JsonStreamWriter, see:
+ // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
+ try (JsonStreamWriter writer =
+ JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
+ .build()) {
+ // Write two batches to the stream, each with 10 JSON records.
+ for (int i = 0; i < 2; i++) {
+ JSONArray jsonArr = new JSONArray();
+ for (int j = 0; j < 10; j++) {
+ // Create a JSON object that is compatible with the table schema.
+ JSONObject record = new JSONObject();
+ record.put("col1", String.format("buffered-record %03d-%03d", i, j));
+ jsonArr.put(record);
+ }
+ ApiFuture future = writer.append(jsonArr);
+ AppendRowsResponse response = future.get();
+ }
+
+ // Flush the buffer.
+ FlushRowsRequest flushRowsRequest =
+ FlushRowsRequest.newBuilder()
+ .setWriteStream(writeStream.getName())
+ .setOffset(Int64Value.of(10 * 2 - 1)) // Advance the cursor to the latest record.
+ .build();
+ FlushRowsResponse flushRowsResponse = client.flushRows(flushRowsRequest);
+ // You can continue to write to the stream after flushing the buffer.
+ }
+ System.out.println("Appended and committed records successfully.");
+ } catch (ExecutionException e) {
+ // If the wrapped exception is a StatusRuntimeException, check the state of the operation.
+ // If the state is INTERNAL, CANCELLED, or ABORTED, you can retry. For more information, see:
+ // https://grpc.github.io/grpc-java/javadoc/io/grpc/StatusRuntimeException.html
+ System.out.println(e);
+ }
+ }
+}
+// [END bigquerystorage_jsonstreamwriter_buffered]
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java
index ea1ac9ccd1..63ff782924 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteCommittedStream.java
@@ -64,17 +64,19 @@ public static void writeCommittedStream(String projectId, String datasetName, St
try (JsonStreamWriter writer =
JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
.build()) {
- // Append 10 JSON objects to the stream.
- for (int i = 0; i < 10; i++) {
+ // Write two batches to the stream, each with 10 JSON records.
+ for (int i = 0; i < 2; i++) {
// Create a JSON object that is compatible with the table schema.
- JSONObject record = new JSONObject();
- record.put("col1", String.format("record %03d", i));
JSONArray jsonArr = new JSONArray();
- jsonArr.put(record);
+ for (int j = 0; j < 10; j++) {
+ JSONObject record = new JSONObject();
+ record.put("col1", String.format("record %03d-%03d", i, j));
+ jsonArr.put(record);
+ }
// To detect duplicate records, pass the index as the record offset.
// To disable deduplication, omit the offset or use WriteStream.Type.DEFAULT.
- ApiFuture future = writer.append(jsonArr, /*offset=*/ i);
+ ApiFuture future = writer.append(jsonArr, /*offset=*/ i * 10);
AppendRowsResponse response = future.get();
}
}
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java
index fc7b4c1af5..386f08a5d0 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java
@@ -67,14 +67,15 @@ public static void writePendingStream(String projectId, String datasetName, Stri
try (JsonStreamWriter writer =
JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
.build()) {
- // Append 10 JSON objects to the stream.
- for (int i = 0; i < 10; i++) {
+ // Write two batches to the stream, each with 10 JSON records.
+ for (int i = 0; i < 2; i++) {
// Create a JSON object that is compatible with the table schema.
- JSONObject record = new JSONObject();
- record.put("col1", String.format("batch-record %03d", i));
JSONArray jsonArr = new JSONArray();
- jsonArr.put(record);
-
+ for (int j = 0; j < 10; j++) {
+ JSONObject record = new JSONObject();
+ record.put("col1", String.format("batch-record %03d-%03d", i, j));
+ jsonArr.put(record);
+ }
ApiFuture future = writer.append(jsonArr);
AppendRowsResponse response = future.get();
}
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
index 2241c91bba..a06b113b3b 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
@@ -56,14 +56,15 @@ public static void writeToDefaultStream(String projectId, String datasetName, St
// https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
try (JsonStreamWriter writer =
JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build()) {
- // Append 10 JSON objects to the stream.
- for (int i = 0; i < 10; i++) {
+ // Write two batches to the stream, each with 10 JSON records.
+ for (int i = 0; i < 2; i++) {
// Create a JSON object that is compatible with the table schema.
- JSONObject record = new JSONObject();
- record.put("test_string", String.format("record %03d", i));
JSONArray jsonArr = new JSONArray();
- jsonArr.put(record);
-
+ for (int j = 0; j < 10; j++) {
+ JSONObject record = new JSONObject();
+ record.put("test_string", String.format("record %03d-%03d", i, j));
+ jsonArr.put(record);
+ }
ApiFuture future = writer.append(jsonArr);
AppendRowsResponse response = future.get();
}
diff --git a/samples/snippets/src/test/java/com/example/bigquerystorage/WriteBufferedStreamIT.java b/samples/snippets/src/test/java/com/example/bigquerystorage/WriteBufferedStreamIT.java
new file mode 100644
index 0000000000..2a7460247d
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquerystorage/WriteBufferedStreamIT.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2021 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquerystorage;
+
+import static com.google.common.truth.Truth.assertThat;
+import static junit.framework.TestCase.assertNotNull;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.DatasetId;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.cloud.bigquery.TableResult;
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.util.UUID;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public class WriteBufferedStreamIT {
+
+ private static final String GOOGLE_CLOUD_PROJECT = System.getenv("GOOGLE_CLOUD_PROJECT");
+
+ private ByteArrayOutputStream bout;
+ private PrintStream out;
+ private BigQuery bigquery;
+ private String datasetName;
+ private String tableName;
+
+ private static void requireEnvVar(String varName) {
+ assertNotNull(
+ "Environment variable " + varName + " is required to perform these tests.",
+ System.getenv(varName));
+ }
+
+ @BeforeClass
+ public static void checkRequirements() {
+ requireEnvVar("GOOGLE_CLOUD_PROJECT");
+ }
+
+ @Before
+ public void setUp() {
+ bout = new ByteArrayOutputStream();
+ out = new PrintStream(bout);
+ System.setOut(out);
+
+ bigquery = BigQueryOptions.getDefaultInstance().getService();
+
+ // Create a new dataset and table for each test.
+ datasetName = "WRITE_STREAM_TEST" + UUID.randomUUID().toString().substring(0, 8);
+ tableName = "BUFFERED_STREAM_TEST" + UUID.randomUUID().toString().substring(0, 8);
+ Schema schema = Schema.of(Field.of("col1", StandardSQLTypeName.STRING));
+ bigquery.create(DatasetInfo.newBuilder(datasetName).build());
+ TableInfo tableInfo =
+ TableInfo.newBuilder(TableId.of(datasetName, tableName), StandardTableDefinition.of(schema))
+ .build();
+ bigquery.create(tableInfo);
+ }
+
+ @After
+ public void tearDown() {
+ bigquery.delete(
+ DatasetId.of(GOOGLE_CLOUD_PROJECT, datasetName), DatasetDeleteOption.deleteContents());
+ System.setOut(null);
+ }
+
+ @Test
+ public void testWriteBufferedStream() throws Exception {
+ WriteBufferedStream.writeBufferedStream(GOOGLE_CLOUD_PROJECT, datasetName, tableName);
+ assertThat(bout.toString()).contains("Appended and committed records successfully.");
+
+ // Verify that the records are visible in the table.
+ String query = "SELECT * FROM " + tableName;
+ QueryJobConfiguration queryConfig =
+ QueryJobConfiguration.newBuilder(query).setDefaultDataset(datasetName).build();
+ TableResult result = bigquery.query(queryConfig);
+ assertThat(result.getTotalRows()).isEqualTo(20);
+ }
+}
diff --git a/versions.txt b/versions.txt
index 694a23f49e..0e9e27573b 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,10 +1,10 @@
# Format:
# module:released-version:current-version
-google-cloud-bigquerystorage:2.3.2:2.3.2
-grpc-google-cloud-bigquerystorage-v1beta1:0.127.2:0.127.2
-grpc-google-cloud-bigquerystorage-v1beta2:0.127.2:0.127.2
-grpc-google-cloud-bigquerystorage-v1:2.3.2:2.3.2
-proto-google-cloud-bigquerystorage-v1beta1:0.127.2:0.127.2
-proto-google-cloud-bigquerystorage-v1beta2:0.127.2:0.127.2
-proto-google-cloud-bigquerystorage-v1:2.3.2:2.3.2
+google-cloud-bigquerystorage:2.3.3:2.3.3
+grpc-google-cloud-bigquerystorage-v1beta1:0.127.3:0.127.3
+grpc-google-cloud-bigquerystorage-v1beta2:0.127.3:0.127.3
+grpc-google-cloud-bigquerystorage-v1:2.3.3:2.3.3
+proto-google-cloud-bigquerystorage-v1beta1:0.127.3:0.127.3
+proto-google-cloud-bigquerystorage-v1beta2:0.127.3:0.127.3
+proto-google-cloud-bigquerystorage-v1:2.3.3:2.3.3