diff --git a/.github/.OwlBot-hermetic.yaml b/.github/.OwlBot-hermetic.yaml
index c09b71682c..8fa14abee7 100644
--- a/.github/.OwlBot-hermetic.yaml
+++ b/.github/.OwlBot-hermetic.yaml
@@ -29,6 +29,7 @@ deep-preserve-regex:
- "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/ConnectionWorkerPoolTest.java"
- "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/ConnectionWorkerTest.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/RequestProfiler.java"
+- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/TelemetryMetrics.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v.*/BigQueryReadSettings.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v.*/BigQueryReadClient.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/util"
@@ -58,9 +59,10 @@ deep-preserve-regex:
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/JsonStreamWriterTest.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/JsonToProtoMessageTest.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/ProtoSchemaConverterTest.java"
+- "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/RequestProfilerTest.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/SchemaCompatibilityTest.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/StreamWriterTest.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/StreamWriterV2Test.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/it/BigQueryResource.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/it/SimpleRowReader.java"
 - "/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1.*/stub/EnhancedBigQueryReadStubSettingsTest.java"
@@ -78,7 +80,6 @@ deep-preserve-regex:
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/JsonToProtoMessage.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/OnSchemaUpdateRunnable.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/ProtoSchemaConverter.java"
-- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/RequestProfilerTest.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/SchemaCompatibility.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/StreamConnection.java"
- "/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1.*/StreamWriter.java"
diff --git a/.github/release-please.yml b/.github/release-please.yml
index be7512564e..adc955e4b9 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -26,3 +26,7 @@ branches:
handleGHRelease: true
releaseType: java-backport
branch: 2.47.x
+ - bumpMinorPreMajor: true
+ handleGHRelease: true
+ releaseType: java-backport
+ branch: 3.5.x
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index b2d385e256..9694b0e91e 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -100,6 +100,20 @@ branchProtectionRules:
- 'Kokoro - Test: Integration'
- cla/google
- javadoc
+ - pattern: 3.5.x
+ isAdminEnforced: true
+ requiredApprovingReviewCount: 1
+ requiresCodeOwnerReviews: true
+ requiresStrictStatusChecks: false
+ requiredStatusCheckContexts:
+ - dependencies (17)
+ - lint
+ - clirr
+ - units (8)
+ - units (11)
+ - 'Kokoro - Test: Integration'
+ - cla/google
+ - javadoc
permissionRules:
- team: yoshi-admins
permission: admin
diff --git a/.github/workflows/hermetic_library_generation.yaml b/.github/workflows/hermetic_library_generation.yaml
index 75183c6739..7b982df899 100644
--- a/.github/workflows/hermetic_library_generation.yaml
+++ b/.github/workflows/hermetic_library_generation.yaml
@@ -17,10 +17,14 @@ name: Hermetic library generation upon generation config change through pull req
on:
pull_request:
+env:
+ HEAD_REF: ${{ github.head_ref }}
+ REPO_FULL_NAME: ${{ github.event.pull_request.head.repo.full_name }}
+
jobs:
library_generation:
-    # skip pull requests come from a forked repository
+    # skip pull requests coming from a forked repository
     if: github.event.pull_request.head.repo.full_name == github.repository
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -35,6 +39,6 @@ jobs:
[ -z "$(git config user.name)" ] && git config --global user.name "cloud-java-bot"
bash .github/scripts/hermetic_library_generation.sh \
--target_branch ${{ github.base_ref }} \
- --current_branch ${{ github.head_ref }}
+ --current_branch $HEAD_REF
env:
GH_TOKEN: ${{ secrets.CLOUD_JAVA_BOT_TOKEN }}
diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml
index c5851f84eb..6b248899f1 100644
--- a/.github/workflows/unmanaged_dependency_check.yaml
+++ b/.github/workflows/unmanaged_dependency_check.yaml
@@ -17,6 +17,6 @@ jobs:
# repository
.kokoro/build.sh
- name: Unmanaged dependency check
- uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.33.0
+ uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.34.0
with:
bom-path: google-cloud-bigquerystorage-bom/pom.xml
diff --git a/.kokoro/continuous/graalvm-native-17.cfg b/.kokoro/continuous/graalvm-native-17.cfg
index 05e0908b25..92ffa46f75 100644
--- a/.kokoro/continuous/graalvm-native-17.cfg
+++ b/.kokoro/continuous/graalvm-native-17.cfg
@@ -3,7 +3,7 @@
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.33.0"
+ value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.34.0"
}
env_vars: {
diff --git a/.kokoro/continuous/graalvm-native.cfg b/.kokoro/continuous/graalvm-native.cfg
index f8a61aedef..a093a2759c 100644
--- a/.kokoro/continuous/graalvm-native.cfg
+++ b/.kokoro/continuous/graalvm-native.cfg
@@ -3,7 +3,7 @@
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.33.0"
+ value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.34.0"
}
env_vars: {
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c3d11671b3..2f1d535306 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,22 @@
# Changelog
+## [3.8.1](https://github.com/googleapis/java-bigquerystorage/compare/v3.8.0...v3.8.1) (2024-08-20)
+
+
+### Bug Fixes
+
+* Github workflow vulnerable to script injection ([#2600](https://github.com/googleapis/java-bigquerystorage/issues/2600)) ([9ce25b6](https://github.com/googleapis/java-bigquerystorage/commit/9ce25b6c96bcb5b89f69b8deee65c80c4545758f))
+* RequestProfilerTest.java regex in deep-preserve list ([#2589](https://github.com/googleapis/java-bigquerystorage/issues/2589)) ([e62ac66](https://github.com/googleapis/java-bigquerystorage/commit/e62ac664fb8e7056481ad29547bb33ae73ad2ef0))
+
+
+### Dependencies
+
+* Update core dependencies to v1.24.1 ([#2604](https://github.com/googleapis/java-bigquerystorage/issues/2604)) ([eaac3dc](https://github.com/googleapis/java-bigquerystorage/commit/eaac3dc886fe2b4cdcc8cca71fdba4b8055d70f1))
+* Update dependency com.google.cloud:google-cloud-bigquery to v2.42.0 ([#2586](https://github.com/googleapis/java-bigquerystorage/issues/2586)) ([8893d43](https://github.com/googleapis/java-bigquerystorage/commit/8893d435597dd393f39225eaa186bfb637240816))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.34.0 ([#2608](https://github.com/googleapis/java-bigquerystorage/issues/2608)) ([b4861b4](https://github.com/googleapis/java-bigquerystorage/commit/b4861b43f873037b8e20da445f0d6e125eab01b9))
+* Update dependency io.opentelemetry:opentelemetry-bom to v1.41.0 ([#2603](https://github.com/googleapis/java-bigquerystorage/issues/2603)) ([896903a](https://github.com/googleapis/java-bigquerystorage/commit/896903ac4ef5cbc315e6e0a6b1d882649f134cab))
+* Update dependency org.junit.vintage:junit-vintage-engine to v5.11.0 ([#2605](https://github.com/googleapis/java-bigquerystorage/issues/2605)) ([754e883](https://github.com/googleapis/java-bigquerystorage/commit/754e88326d99c1baa191eba511be49a28953632c))
+
## [3.8.0](https://github.com/googleapis/java-bigquerystorage/compare/v3.7.0...v3.8.0) (2024-07-26)
diff --git a/README.md b/README.md
index 153fcf451c..d7cfc3dc7b 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file:
com.google.cloud
libraries-bom
- 26.42.0
+ 26.43.0
pom
import
@@ -42,7 +42,7 @@ If you are using Maven without the BOM, add this to your dependencies:
com.google.cloud
google-cloud-bigquerystorage
- 3.6.1
+ 3.8.0
```
@@ -50,20 +50,20 @@ If you are using Maven without the BOM, add this to your dependencies:
If you are using Gradle 5.x or later, add this to your dependencies:
```Groovy
-implementation platform('com.google.cloud:libraries-bom:26.43.0')
+implementation platform('com.google.cloud:libraries-bom:26.44.0')
implementation 'com.google.cloud:google-cloud-bigquerystorage'
```
If you are using Gradle without BOM, add this to your dependencies:
```Groovy
-implementation 'com.google.cloud:google-cloud-bigquerystorage:3.6.1'
+implementation 'com.google.cloud:google-cloud-bigquerystorage:3.8.0'
```
If you are using SBT, add this to your dependencies:
```Scala
-libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "3.6.1"
+libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "3.8.0"
```
@@ -219,7 +219,7 @@ Java is a registered trademark of Oracle and/or its affiliates.
[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquerystorage/java11.html
[stability-image]: https://img.shields.io/badge/stability-stable-green
[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquerystorage.svg
-[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquerystorage/3.6.1
+[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquerystorage/3.8.0
[authentication]: https://github.com/googleapis/google-cloud-java#authentication
[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes
[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles
diff --git a/generation_config.yaml b/generation_config.yaml
index e6b63a8496..80c3ffbeaf 100644
--- a/generation_config.yaml
+++ b/generation_config.yaml
@@ -1,6 +1,6 @@
-gapic_generator_version: 2.42.0
-googleapis_commitish: ba245fa19c1e6f1f2a13055a437f0c815c061867
-libraries_bom_version: 26.43.0
+gapic_generator_version: 2.43.0
+googleapis_commitish: fe00da8d228985123a20ce197db6bacc01bdfbcd
+libraries_bom_version: 26.44.0
libraries:
- api_shortname: bigquerystorage
name_pretty: BigQuery Storage
diff --git a/google-cloud-bigquerystorage-bom/pom.xml b/google-cloud-bigquerystorage-bom/pom.xml
index 49e1803f9b..7156d1e782 100644
--- a/google-cloud-bigquerystorage-bom/pom.xml
+++ b/google-cloud-bigquerystorage-bom/pom.xml
@@ -3,12 +3,12 @@
4.0.0
com.google.cloud
google-cloud-bigquerystorage-bom
- 3.8.0
+ 3.8.1
pom
com.google.cloud
sdk-platform-java-config
- 3.33.0
+ 3.34.0
Google Cloud bigquerystorage BOM
@@ -52,37 +52,37 @@
com.google.cloud
google-cloud-bigquerystorage
- 3.8.0
+ 3.8.1
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.180.0
+ 0.180.1
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.180.0
+ 0.180.1
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 3.8.0
+ 3.8.1
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.180.0
+ 0.180.1
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.180.0
+ 0.180.1
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 3.8.0
+ 3.8.1
diff --git a/google-cloud-bigquerystorage/pom.xml b/google-cloud-bigquerystorage/pom.xml
index 815903e108..d7bf5d3fd6 100644
--- a/google-cloud-bigquerystorage/pom.xml
+++ b/google-cloud-bigquerystorage/pom.xml
@@ -3,7 +3,7 @@
4.0.0
com.google.cloud
google-cloud-bigquerystorage
- 3.8.0
+ 3.8.1
jar
BigQuery Storage
https://github.com/googleapis/java-bigquerystorage
@@ -11,7 +11,7 @@
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
google-cloud-bigquerystorage
@@ -155,7 +155,7 @@
com.google.auth
google-auth-library-credentials
- 1.24.0
+ 1.24.1
io.opentelemetry
@@ -224,7 +224,7 @@
com.google.auth
google-auth-library-oauth2-http
- 1.24.0
+ 1.24.1
test
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/ConnectionWorker.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/ConnectionWorker.java
index 2f3ce676cd..35d6f1da76 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/ConnectionWorker.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/ConnectionWorker.java
@@ -31,19 +31,12 @@
import com.google.cloud.bigquery.storage.v1.StreamConnection.RequestCallback;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.Uninterruptibles;
import com.google.protobuf.Int64Value;
import io.grpc.Status;
import io.grpc.Status.Code;
import io.grpc.StatusRuntimeException;
-import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
-import io.opentelemetry.api.common.AttributesBuilder;
-import io.opentelemetry.api.metrics.LongCounter;
-import io.opentelemetry.api.metrics.LongHistogram;
-import io.opentelemetry.api.metrics.Meter;
-import io.opentelemetry.api.metrics.MeterProvider;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
@@ -250,6 +243,7 @@ class ConnectionWorker implements AutoCloseable {
private final RetrySettings retrySettings;
private final RequestProfiler.RequestProfilerHook requestProfilerHook;
+ private final TelemetryMetrics telemetryMetrics;
private static String projectMatching = "projects/[^/]+/";
private static Pattern streamPatternProject = Pattern.compile(projectMatching);
@@ -259,38 +253,7 @@ class ConnectionWorker implements AutoCloseable {
private static String tableMatching = "(projects/[^/]+/datasets/[^/]+/tables/[^/]+)/";
private static Pattern streamPatternTable = Pattern.compile(tableMatching);
- private final boolean enableOpenTelemetry;
- private Meter writeMeter;
- static AttributeKey telemetryKeyTableId = AttributeKey.stringKey("table_id");
- static AttributeKey telemetryKeyWriterId = AttributeKey.stringKey("writer_id");
- private static String dataflowPrefix = "dataflow:";
- static List> telemetryKeysTraceId =
- new ArrayList>() {
- {
- add(AttributeKey.stringKey("trace_field_1"));
- add(AttributeKey.stringKey("trace_field_2"));
- add(AttributeKey.stringKey("trace_field_3"));
- }
- };
- static AttributeKey telemetryKeyErrorCode = AttributeKey.stringKey("error_code");
- static AttributeKey telemetryKeyIsRetry = AttributeKey.stringKey("is_retry");
- private Attributes telemetryAttributes;
// Latency buckets are based on a list of 1.5 ^ n
- private static final List METRICS_MILLISECONDS_LATENCY_BUCKETS =
- ImmutableList.of(
- 0L, 17L, 38L, 86L, 195L, 438L, 985L, 2217L, 4988L, 11223L, 25251L, 56815L, 127834L,
- 287627L, 647160L);
-
- private static final class OpenTelemetryMetrics {
- private LongCounter instrumentAckedRequestCount;
- private LongCounter instrumentAckedRequestSize;
- private LongCounter instrumentAckedRequestRows;
- private LongHistogram instrumentNetworkResponseLatency;
- private LongCounter instrumentConnectionStartCount;
- private LongCounter instrumentConnectionEndCount;
- }
-
- private OpenTelemetryMetrics telemetryMetrics = new OpenTelemetryMetrics();
public static Boolean isDefaultStreamName(String streamName) {
Matcher matcher = DEFAULT_STREAM_PATTERN.matcher(streamName);
@@ -322,151 +285,33 @@ private String getTableName() {
return tableMatcher.find() ? tableMatcher.group(1) : "";
}
- private void setTraceIdAttributesPart(
- AttributesBuilder builder,
- String[] traceIdParts,
- int indexPartsToCheck,
- int indexTelemetryKeysToUse) {
- if ((indexPartsToCheck < traceIdParts.length) && !traceIdParts[indexPartsToCheck].isEmpty()) {
- builder.put(
- telemetryKeysTraceId.get(indexTelemetryKeysToUse), traceIdParts[indexPartsToCheck]);
- }
- }
-
- private void setTraceIdAttributes(AttributesBuilder builder) {
- if ((this.traceId != null) && !this.traceId.isEmpty()) {
- int indexDataflow = this.traceId.toLowerCase().indexOf(dataflowPrefix);
- if (indexDataflow >= 0) {
- String[] traceIdParts =
- this.traceId.substring(indexDataflow + dataflowPrefix.length()).split(":", 8);
- setTraceIdAttributesPart(builder, traceIdParts, 0, 0);
- setTraceIdAttributesPart(builder, traceIdParts, 1, 1);
- setTraceIdAttributesPart(builder, traceIdParts, 2, 2);
+ public boolean hasActiveConnection() {
+ boolean isConnected = false;
+ this.lock.lock();
+ try {
+ if (streamConnectionIsConnected) {
+ isConnected = true;
}
+ } finally {
+ this.lock.unlock();
}
+ return isConnected;
}
- // Specify common attributes for all metrics.
- // For example, table name and writer id.
- // Metrics dashboards can be filtered on available attributes.
- private Attributes buildOpenTelemetryAttributes() {
- AttributesBuilder builder = Attributes.builder();
- String tableName = getTableName();
- if (!tableName.isEmpty()) {
- builder.put(telemetryKeyTableId, tableName);
- }
- builder.put(telemetryKeyWriterId, writerId);
- setTraceIdAttributes(builder);
- return builder.build();
- }
-
- // Refresh the table name attribute when multiplexing switches between tables.
- private void refreshOpenTelemetryTableNameAttributes() {
- String tableName = getTableName();
- if (!tableName.isEmpty()
- && !tableName.equals(getTelemetryAttributes().get(telemetryKeyTableId))) {
- AttributesBuilder builder = getTelemetryAttributes().toBuilder();
- builder.put(telemetryKeyTableId, tableName);
- this.telemetryAttributes = builder.build();
- }
- }
-
- // Build new attributes augmented with an error code string.
- private Attributes augmentAttributesWithErrorCode(Attributes attributes, String errorCode) {
- AttributesBuilder builder = attributes.toBuilder();
- if ((errorCode != null) && !errorCode.isEmpty()) {
- builder.put(telemetryKeyErrorCode, errorCode);
+ public int getInflightRequestQueueLength() {
+ int length = 0;
+ this.lock.lock();
+ try {
+ length = inflightRequestQueue.size();
+ } finally {
+ this.lock.unlock();
}
- return builder.build();
- }
-
- // Build new attributes augmented with a flag indicating this was a retry.
- private Attributes augmentAttributesWithRetry(Attributes attributes) {
- AttributesBuilder builder = attributes.toBuilder();
- builder.put(telemetryKeyIsRetry, "1");
- return builder.build();
+ return length;
}
@VisibleForTesting
Attributes getTelemetryAttributes() {
- return telemetryAttributes;
- }
-
- private void registerOpenTelemetryMetrics() {
- MeterProvider meterProvider = Singletons.getOpenTelemetry().getMeterProvider();
- writeMeter =
- meterProvider
- .meterBuilder("com.google.cloud.bigquery.storage.v1.write")
- .setInstrumentationVersion(
- ConnectionWorker.class.getPackage().getImplementationVersion())
- .build();
- telemetryMetrics.instrumentAckedRequestCount =
- writeMeter
- .counterBuilder("append_requests_acked")
- .setDescription("Counts number of requests acked by the server")
- .build();
- telemetryMetrics.instrumentAckedRequestSize =
- writeMeter
- .counterBuilder("append_request_bytes_acked")
- .setDescription("Counts byte size of requests acked by the server")
- .build();
- telemetryMetrics.instrumentAckedRequestRows =
- writeMeter
- .counterBuilder("append_rows_acked")
- .setDescription("Counts number of request rows acked by the server")
- .build();
- writeMeter
- .gaugeBuilder("active_connection_count")
- .ofLongs()
- .setDescription("Reports number of active connections")
- .buildWithCallback(
- measurement -> {
- int count = 0;
- this.lock.lock();
- try {
- if (streamConnectionIsConnected) {
- count = 1;
- }
- } finally {
- this.lock.unlock();
- }
- measurement.record(count, getTelemetryAttributes());
- });
- writeMeter
- .gaugeBuilder("inflight_queue_length")
- .ofLongs()
- .setDescription(
- "Reports length of inflight queue. This queue contains sent append requests waiting for response from the server.")
- .buildWithCallback(
- measurement -> {
- int length = 0;
- this.lock.lock();
- try {
- length = inflightRequestQueue.size();
- } finally {
- this.lock.unlock();
- }
- measurement.record(length, getTelemetryAttributes());
- });
- telemetryMetrics.instrumentNetworkResponseLatency =
- writeMeter
- .histogramBuilder("network_response_latency")
- .ofLongs()
- .setDescription(
- "Reports time taken in milliseconds for a response to arrive once a message has been sent over the network.")
- .setExplicitBucketBoundariesAdvice(METRICS_MILLISECONDS_LATENCY_BUCKETS)
- .build();
- telemetryMetrics.instrumentConnectionStartCount =
- writeMeter
- .counterBuilder("connection_start_count")
- .setDescription(
- "Counts number of connection attempts made, regardless of whether these are initial or retry.")
- .build();
- telemetryMetrics.instrumentConnectionEndCount =
- writeMeter
- .counterBuilder("connection_end_count")
- .setDescription("Counts number of connection end events.")
- .build();
+ return telemetryMetrics.getTelemetryAttributes();
}
public ConnectionWorker(
@@ -505,10 +350,9 @@ public ConnectionWorker(
this.inflightRequestQueue = new LinkedList();
this.compressorName = compressorName;
this.retrySettings = retrySettings;
- this.telemetryAttributes = buildOpenTelemetryAttributes();
this.requestProfilerHook = new RequestProfiler.RequestProfilerHook(enableRequestProfiler);
- this.enableOpenTelemetry = enableOpenTelemetry;
- registerOpenTelemetryMetrics();
+ this.telemetryMetrics =
+ new TelemetryMetrics(this, enableOpenTelemetry, getTableName(), writerId, traceId);
// Always recreate a client for connection worker.
HashMap newHeaders = new HashMap<>();
@@ -559,7 +403,7 @@ public void run() {
private void resetConnection() {
log.info("Start connecting stream: " + streamName + " id: " + writerId);
- telemetryMetrics.instrumentConnectionStartCount.add(1, getTelemetryAttributes());
+ telemetryMetrics.recordConnectionStart();
if (this.streamConnection != null) {
// It's safe to directly close the previous connection as the in flight messages
// will be picked up by the next connection.
@@ -1002,7 +846,7 @@ private void appendLoop() {
|| (originalRequest.getProtoRows().hasWriterSchema()
&& !originalRequest.getProtoRows().getWriterSchema().equals(writerSchema))) {
streamName = originalRequest.getWriteStream();
- refreshOpenTelemetryTableNameAttributes();
+ telemetryMetrics.refreshOpenTelemetryTableNameAttributes(getTableName());
writerSchema = originalRequest.getProtoRows().getWriterSchema();
isMultiplexing = true;
firstRequestForTableOrSchemaSwitch = true;
@@ -1309,8 +1153,7 @@ private void requestCallback(AppendRowsResponse response) {
Instant sendInstant = inflightRequestQueue.getFirst().requestSendTimeStamp;
if (sendInstant != null) {
Duration durationLatency = Duration.between(sendInstant, Instant.now());
- telemetryMetrics.instrumentNetworkResponseLatency.record(
- durationLatency.toMillis(), getTelemetryAttributes());
+ telemetryMetrics.recordNetworkLatency(durationLatency);
}
requestWrapper = pollFirstInflightRequestQueue();
@@ -1333,21 +1176,13 @@ private void requestCallback(AppendRowsResponse response) {
this.lock.unlock();
}
- Attributes augmentedTelemetryAttributes =
- augmentAttributesWithErrorCode(
- getTelemetryAttributes(),
- Code.values()[
- response.hasError() ? response.getError().getCode() : Status.Code.OK.ordinal()]
- .toString());
- if (requestWrapper.retryCount > 0) {
- augmentedTelemetryAttributes = augmentAttributesWithRetry(augmentedTelemetryAttributes);
- }
- telemetryMetrics.instrumentAckedRequestCount.add(1, augmentedTelemetryAttributes);
- telemetryMetrics.instrumentAckedRequestSize.add(
- requestWrapper.messageSize, augmentedTelemetryAttributes);
- telemetryMetrics.instrumentAckedRequestRows.add(
+ telemetryMetrics.recordResponse(
+ requestWrapper.messageSize,
requestWrapper.message.getProtoRows().getRows().getSerializedRowsCount(),
- augmentedTelemetryAttributes);
+ Code.values()[
+ response.hasError() ? response.getError().getCode() : Status.Code.OK.ordinal()]
+ .toString(),
+ requestWrapper.retryCount > 0);
// Retries need to happen on the same thread as queue locking may occur
if (response.hasError()) {
@@ -1431,11 +1266,8 @@ private void doneCallback(Throwable finalStatus) {
this.lock.lock();
try {
this.streamConnectionIsConnected = false;
- this.telemetryMetrics.instrumentConnectionEndCount.add(
- 1,
- augmentAttributesWithErrorCode(
- getTelemetryAttributes(),
- Code.values()[Status.fromThrowable(finalStatus).getCode().ordinal()].toString()));
+ this.telemetryMetrics.recordConnectionEnd(
+ Code.values()[Status.fromThrowable(finalStatus).getCode().ordinal()].toString());
if (connectionFinalStatus == null) {
if (connectionRetryStartTime == 0) {
connectionRetryStartTime = System.currentTimeMillis();
@@ -1447,8 +1279,7 @@ private void doneCallback(Throwable finalStatus) {
|| System.currentTimeMillis() - connectionRetryStartTime
<= maxRetryDuration.toMillis())) {
this.conectionRetryCountWithoutCallback++;
- this.telemetryMetrics.instrumentConnectionStartCount.add(
- 1, augmentAttributesWithRetry(getTelemetryAttributes()));
+ this.telemetryMetrics.recordConnectionStartWithRetry();
log.info(
"Connection is going to be reestablished with the next request. Retriable error "
+ finalStatus.toString()
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/Singletons.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/Singletons.java
index aae8cd99dd..958cb9e227 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/Singletons.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/Singletons.java
@@ -30,6 +30,7 @@ class Singletons {
static OpenTelemetry getOpenTelemetry() {
if (openTelemetry == null) {
openTelemetry = GlobalOpenTelemetry.get();
+ log.info("BigQueryStorage initialized Open Telemetry");
}
return openTelemetry;
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/TelemetryMetrics.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/TelemetryMetrics.java
new file mode 100644
index 0000000000..c0e5214fc3
--- /dev/null
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/TelemetryMetrics.java
@@ -0,0 +1,263 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.google.cloud.bigquery.storage.v1;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.common.AttributesBuilder;
+import io.opentelemetry.api.metrics.LongCounter;
+import io.opentelemetry.api.metrics.LongHistogram;
+import io.opentelemetry.api.metrics.Meter;
+import io.opentelemetry.api.metrics.MeterProvider;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+/**
+ * Enables export of metrics to OpenTelemetry. Since it keeps track of whether metrics are
+ * enabled/disabled, it simplifies calling code by removing the need for tedious if clauses to check
+ * whether metrics are enabled/disabled. Also, GlobalOpenTelemetry.get() should only be called after
+ * client application has already installed the necessary meter provider.
+ */
+public class TelemetryMetrics {
+ private static final Logger log = Logger.getLogger(TelemetryMetrics.class.getName());
+
+ private final ConnectionWorker connectionWorker;
+ private final boolean enableMetrics;
+
+ private Meter writeMeter;
+
+ private static final class OpenTelemetryMetrics {
+ private LongCounter instrumentAckedRequestCount;
+ private LongCounter instrumentAckedRequestSize;
+ private LongCounter instrumentAckedRequestRows;
+ private LongHistogram instrumentNetworkResponseLatency;
+ private LongCounter instrumentConnectionStartCount;
+ private LongCounter instrumentConnectionEndCount;
+ }
+
+ private OpenTelemetryMetrics openTelemetryMetrics = new OpenTelemetryMetrics();
+ private static final List<Long> METRICS_MILLISECONDS_LATENCY_BUCKETS =
+ ImmutableList.of(
+ 0L, 17L, 38L, 86L, 195L, 438L, 985L, 2217L, 4988L, 11223L, 25251L, 56815L, 127834L,
+ 287627L, 647160L);
+ static AttributeKey<String> telemetryKeyTableId = AttributeKey.stringKey("table_id");
+ static AttributeKey<String> telemetryKeyWriterId = AttributeKey.stringKey("writer_id");
+ private static String dataflowPrefix = "dataflow:";
+ static List<AttributeKey<String>> telemetryKeysTraceId =
+ new ArrayList<AttributeKey<String>>() {
+ {
+ add(AttributeKey.stringKey("trace_field_1"));
+ add(AttributeKey.stringKey("trace_field_2"));
+ add(AttributeKey.stringKey("trace_field_3"));
+ }
+ };
+ static AttributeKey<String> telemetryKeyErrorCode = AttributeKey.stringKey("error_code");
+ static AttributeKey<String> telemetryKeyIsRetry = AttributeKey.stringKey("is_retry");
+ private Attributes telemetryAttributes;
+
+ private void setTraceIdAttributesPart(
+ AttributesBuilder builder,
+ String[] traceIdParts,
+ int indexPartsToCheck,
+ int indexTelemetryKeysToUse) {
+ if ((indexPartsToCheck < traceIdParts.length) && !traceIdParts[indexPartsToCheck].isEmpty()) {
+ builder.put(
+ telemetryKeysTraceId.get(indexTelemetryKeysToUse), traceIdParts[indexPartsToCheck]);
+ }
+ }
+
+ private void setTraceIdAttributes(AttributesBuilder builder, String traceId) {
+ if ((traceId != null) && !traceId.isEmpty()) {
+ int indexDataflow = traceId.toLowerCase().indexOf(dataflowPrefix);
+ if (indexDataflow >= 0) {
+ String[] traceIdParts =
+ traceId.substring(indexDataflow + dataflowPrefix.length()).split(":", 8);
+ setTraceIdAttributesPart(builder, traceIdParts, 0, 0);
+ setTraceIdAttributesPart(builder, traceIdParts, 1, 1);
+ setTraceIdAttributesPart(builder, traceIdParts, 2, 2);
+ }
+ }
+ }
+
+ // Specify common attributes for all metrics.
+ // For example, table name and writer id.
+ // Metrics dashboards can be filtered on available attributes.
+ private Attributes buildOpenTelemetryAttributes(
+ String tableName, String writerId, String traceId) {
+ AttributesBuilder builder = Attributes.builder();
+ if (!tableName.isEmpty()) {
+ builder.put(telemetryKeyTableId, tableName);
+ }
+ builder.put(telemetryKeyWriterId, writerId);
+ setTraceIdAttributes(builder, traceId);
+ return builder.build();
+ }
+
+ // Build new attributes augmented with an error code string.
+ private Attributes augmentAttributesWithErrorCode(Attributes attributes, String errorCode) {
+ AttributesBuilder builder = attributes.toBuilder();
+ if ((errorCode != null) && !errorCode.isEmpty()) {
+ builder.put(telemetryKeyErrorCode, errorCode);
+ }
+ return builder.build();
+ }
+
+ // Build new attributes augmented with a flag indicating this was a retry.
+ private Attributes augmentAttributesWithRetry(Attributes attributes) {
+ AttributesBuilder builder = attributes.toBuilder();
+ builder.put(telemetryKeyIsRetry, "1");
+ return builder.build();
+ }
+
+ @VisibleForTesting
+ Attributes getTelemetryAttributes() {
+ return telemetryAttributes;
+ }
+
+ private void registerOpenTelemetryMetrics(ConnectionWorker connectionWorker) {
+ MeterProvider meterProvider = Singletons.getOpenTelemetry().getMeterProvider();
+ writeMeter =
+ meterProvider
+ .meterBuilder("com.google.cloud.bigquery.storage.v1.write")
+ .setInstrumentationVersion(
+ ConnectionWorker.class.getPackage().getImplementationVersion())
+ .build();
+ openTelemetryMetrics.instrumentAckedRequestCount =
+ writeMeter
+ .counterBuilder("append_requests_acked")
+ .setDescription("Counts number of requests acked by the server")
+ .build();
+ openTelemetryMetrics.instrumentAckedRequestSize =
+ writeMeter
+ .counterBuilder("append_request_bytes_acked")
+ .setDescription("Counts byte size of requests acked by the server")
+ .build();
+ openTelemetryMetrics.instrumentAckedRequestRows =
+ writeMeter
+ .counterBuilder("append_rows_acked")
+ .setDescription("Counts number of request rows acked by the server")
+ .build();
+ writeMeter
+ .gaugeBuilder("active_connection_count")
+ .ofLongs()
+ .setDescription("Reports number of active connections")
+ .buildWithCallback(
+ measurement -> {
+ int count = connectionWorker.hasActiveConnection() ? 1 : 0;
+ measurement.record(count, getTelemetryAttributes());
+ });
+ writeMeter
+ .gaugeBuilder("inflight_queue_length")
+ .ofLongs()
+ .setDescription(
+ "Reports length of inflight queue. This queue contains sent append requests waiting for response from the server.")
+ .buildWithCallback(
+ measurement -> {
+ int length = connectionWorker.getInflightRequestQueueLength();
+ measurement.record(length, getTelemetryAttributes());
+ });
+ openTelemetryMetrics.instrumentNetworkResponseLatency =
+ writeMeter
+ .histogramBuilder("network_response_latency")
+ .ofLongs()
+ .setDescription(
+ "Reports time taken in milliseconds for a response to arrive once a message has been sent over the network.")
+ .setExplicitBucketBoundariesAdvice(METRICS_MILLISECONDS_LATENCY_BUCKETS)
+ .build();
+ openTelemetryMetrics.instrumentConnectionStartCount =
+ writeMeter
+ .counterBuilder("connection_start_count")
+ .setDescription(
+ "Counts number of connection attempts made, regardless of whether these are initial or retry.")
+ .build();
+ openTelemetryMetrics.instrumentConnectionEndCount =
+ writeMeter
+ .counterBuilder("connection_end_count")
+ .setDescription("Counts number of connection end events.")
+ .build();
+ }
+
+ TelemetryMetrics(
+ ConnectionWorker connectionWorker,
+ boolean enableMetrics,
+ String tableName,
+ String writerId,
+ String traceId) {
+ this.connectionWorker = connectionWorker;
+ this.enableMetrics = enableMetrics;
+ if (enableMetrics) {
+ this.telemetryAttributes = buildOpenTelemetryAttributes(tableName, writerId, traceId);
+ registerOpenTelemetryMetrics(connectionWorker);
+ }
+ }
+
+ // Refresh the table name attribute when multiplexing switches between tables.
+ public void refreshOpenTelemetryTableNameAttributes(String tableName) {
+ if (enableMetrics) {
+ if (!tableName.isEmpty()
+ && !tableName.equals(getTelemetryAttributes().get(telemetryKeyTableId))) {
+ AttributesBuilder builder = getTelemetryAttributes().toBuilder();
+ builder.put(telemetryKeyTableId, tableName);
+ this.telemetryAttributes = builder.build();
+ }
+ }
+ }
+
+ public void recordConnectionStart() {
+ if (enableMetrics) {
+ openTelemetryMetrics.instrumentConnectionStartCount.add(1, getTelemetryAttributes());
+ }
+ }
+
+ public void recordConnectionStartWithRetry() {
+ if (enableMetrics) {
+ openTelemetryMetrics.instrumentConnectionStartCount.add(
+ 1, augmentAttributesWithRetry(getTelemetryAttributes()));
+ }
+ }
+
+ public void recordConnectionEnd(String errorCode) {
+ if (enableMetrics) {
+ openTelemetryMetrics.instrumentConnectionEndCount.add(
+ 1, augmentAttributesWithErrorCode(getTelemetryAttributes(), errorCode));
+ }
+ }
+
+ public void recordNetworkLatency(Duration latency) {
+ if (enableMetrics) {
+ openTelemetryMetrics.instrumentNetworkResponseLatency.record(
+ latency.toMillis(), getTelemetryAttributes());
+ }
+ }
+
+ public void recordResponse(long messageSize, long rowCount, String errorCode, boolean isRetry) {
+ if (enableMetrics) {
+ Attributes augmentedTelemetryAttributes =
+ augmentAttributesWithErrorCode(getTelemetryAttributes(), errorCode);
+ if (isRetry) {
+ augmentedTelemetryAttributes = augmentAttributesWithRetry(augmentedTelemetryAttributes);
+ }
+ openTelemetryMetrics.instrumentAckedRequestCount.add(1, augmentedTelemetryAttributes);
+ openTelemetryMetrics.instrumentAckedRequestSize.add(
+ messageSize, augmentedTelemetryAttributes);
+ openTelemetryMetrics.instrumentAckedRequestRows.add(rowCount, augmentedTelemetryAttributes);
+ }
+ }
+}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java
index a0741f80ee..66799aa246 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryReadStubSettings.java
@@ -17,6 +17,7 @@
package com.google.cloud.bigquery.storage.v1.stub;
import com.google.api.core.ApiFunction;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
@@ -137,6 +138,7 @@ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuild
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "bigquerystorage.googleapis.com:443";
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryWriteStubSettings.java
index bd4decba9c..3205756828 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryWriteStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/BigQueryWriteStubSettings.java
@@ -17,6 +17,7 @@
package com.google.cloud.bigquery.storage.v1.stub;
import com.google.api.core.ApiFunction;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
@@ -163,6 +164,7 @@ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuild
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "bigquerystorage.googleapis.com:443";
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/EnhancedBigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/EnhancedBigQueryReadStubSettings.java
index 9348c71956..a6169ad9e2 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/EnhancedBigQueryReadStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/stub/EnhancedBigQueryReadStubSettings.java
@@ -17,6 +17,7 @@
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
@@ -93,6 +94,7 @@ public String getServiceName() {
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return BigQueryReadStubSettings.getDefaultEndpoint();
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java
index e7300230c3..1cd7fa40cc 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/BigQueryStorageStubSettings.java
@@ -18,6 +18,7 @@
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
@@ -158,6 +159,7 @@ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuild
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "bigquerystorage.googleapis.com:443";
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/EnhancedBigQueryStorageStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/EnhancedBigQueryStorageStubSettings.java
index ce8b9f5672..6c04de776c 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/EnhancedBigQueryStorageStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/stub/EnhancedBigQueryStorageStubSettings.java
@@ -17,6 +17,7 @@
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
@@ -112,6 +113,7 @@ public String getServiceName() {
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return BigQueryStorageStubSettings.getDefaultEndpoint();
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java
index f85be47160..0965e1807c 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryReadStubSettings.java
@@ -18,6 +18,7 @@
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
@@ -139,6 +140,7 @@ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuild
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "bigquerystorage.googleapis.com:443";
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java
index 34adc3be4f..880e48c1b9 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/BigQueryWriteStubSettings.java
@@ -18,6 +18,7 @@
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
@@ -198,6 +199,7 @@ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuild
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "bigquerystorage.googleapis.com:443";
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/EnhancedBigQueryReadStubSettings.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/EnhancedBigQueryReadStubSettings.java
index 4daecedfb6..db65a9dd7b 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/EnhancedBigQueryReadStubSettings.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/stub/EnhancedBigQueryReadStubSettings.java
@@ -17,6 +17,7 @@
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
+import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
@@ -93,6 +94,7 @@ public String getServiceName() {
}
/** Returns the default service endpoint. */
+ @ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return BigQueryReadStubSettings.getDefaultEndpoint();
}
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/ConnectionWorkerTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/ConnectionWorkerTest.java
index efcc253434..2e540d7b2a 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/ConnectionWorkerTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/ConnectionWorkerTest.java
@@ -787,10 +787,10 @@ private void exerciseOpenTelemetryAttributesWithStreamNames(String streamName, S
client.getSettings(),
retrySettings,
/*enableRequestProfiler=*/ false,
- /*enableOpenTelemetry=*/ false);
+ /*enableOpenTelemetry=*/ true);
Attributes attributes = connectionWorker.getTelemetryAttributes();
- String attributesTableId = attributes.get(ConnectionWorker.telemetryKeyTableId);
+ String attributesTableId = attributes.get(TelemetryMetrics.telemetryKeyTableId);
assertEquals(expected, attributesTableId);
}
@@ -807,7 +807,7 @@ public void testOpenTelemetryAttributesWithStreamNames() throws Exception {
}
void checkOpenTelemetryTraceIdAttribute(Attributes attributes, int index, String expected) {
- String attributesTraceId = attributes.get(ConnectionWorker.telemetryKeysTraceId.get(index));
+ String attributesTraceId = attributes.get(TelemetryMetrics.telemetryKeysTraceId.get(index));
assertEquals(expected, attributesTraceId);
}
@@ -829,7 +829,7 @@ void exerciseOpenTelemetryAttributesWithTraceId(
client.getSettings(),
retrySettings,
/*enableRequestProfiler=*/ false,
- /*enableOpenTelemetry=*/ false);
+ /*enableOpenTelemetry=*/ true);
Attributes attributes = connectionWorker.getTelemetryAttributes();
checkOpenTelemetryTraceIdAttribute(attributes, 0, expectedField1);
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/StreamWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/StreamWriterTest.java
index 43abdb5999..5bd242e0ee 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/StreamWriterTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/StreamWriterTest.java
@@ -835,12 +835,14 @@ public void testOpenTelemetryAttributes_MultiplexingCase() throws Exception {
.setWriterSchema(createProtoSchema())
.setLocation("US")
.setEnableConnectionPool(true)
+ .setEnableOpenTelemetry(true)
.build();
StreamWriter writer2 =
StreamWriter.newBuilder(TEST_STREAM_2, client)
.setWriterSchema(createProtoSchema())
.setLocation("US")
.setEnableConnectionPool(true)
+ .setEnableOpenTelemetry(true)
.build();
testBigQueryWrite.addResponse(createAppendResponse(0));
@@ -849,13 +851,13 @@ public void testOpenTelemetryAttributes_MultiplexingCase() throws Exception {
ApiFuture<AppendRowsResponse> appendFuture1 = sendTestMessage(writer1, new String[] {"A"});
assertEquals(0, appendFuture1.get().getAppendResult().getOffset().getValue());
Attributes attributes = writer1.getTelemetryAttributes();
- String attributesTableId = attributes.get(ConnectionWorker.telemetryKeyTableId);
+ String attributesTableId = attributes.get(TelemetryMetrics.telemetryKeyTableId);
assertEquals("projects/p/datasets/d1/tables/t1", attributesTableId);
ApiFuture<AppendRowsResponse> appendFuture2 = sendTestMessage(writer2, new String[] {"A"});
assertEquals(1, appendFuture2.get().getAppendResult().getOffset().getValue());
attributes = writer2.getTelemetryAttributes();
- attributesTableId = attributes.get(ConnectionWorker.telemetryKeyTableId);
+ attributesTableId = attributes.get(TelemetryMetrics.telemetryKeyTableId);
assertEquals("projects/p/datasets/d2/tables/t2", attributesTableId);
writer1.close();
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java
deleted file mode 100644
index ef6ec2d953..0000000000
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryBigDecimalByteStringEncoderTest.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Copyright 2021 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.google.cloud.bigquery.storage.v1beta2.it;
-
-import static org.junit.Assert.assertEquals;
-
-import com.google.api.core.ApiFuture;
-import com.google.cloud.ServiceOptions;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.DatasetInfo;
-import com.google.cloud.bigquery.Field.Mode;
-import com.google.cloud.bigquery.FieldValueList;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.StandardSQLTypeName;
-import com.google.cloud.bigquery.StandardTableDefinition;
-import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.TableInfo;
-import com.google.cloud.bigquery.TableResult;
-import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
-import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse.AppendResult;
-import com.google.cloud.bigquery.storage.v1beta2.BigDecimalByteStringEncoder;
-import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
-import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
-import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
-import com.google.cloud.bigquery.storage.v1beta2.TableName;
-import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
-import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
-import com.google.protobuf.Descriptors;
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.util.Iterator;
-import java.util.concurrent.ExecutionException;
-import java.util.logging.Logger;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class ITBigQueryBigDecimalByteStringEncoderTest {
- private static final Logger LOG =
- Logger.getLogger(ITBigQueryBigDecimalByteStringEncoderTest.class.getName());
- private static final String DATASET = RemoteBigQueryHelper.generateDatasetName();
- private static final String TABLE = "testtable";
- private static final String DESCRIPTION = "BigQuery Write Java manual client test dataset";
-
- private static BigQueryWriteClient client;
- private static TableInfo tableInfo;
- private static BigQuery bigquery;
-
- @BeforeClass
- public static void beforeClass() throws IOException {
- client = BigQueryWriteClient.create();
-
- RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
- bigquery = bigqueryHelper.getOptions().getService();
- DatasetInfo datasetInfo =
- DatasetInfo.newBuilder(/* datasetId = */ DATASET).setDescription(DESCRIPTION).build();
- bigquery.create(datasetInfo);
- tableInfo =
- TableInfo.newBuilder(
- TableId.of(DATASET, TABLE),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder(
- "test_numeric_zero", StandardSQLTypeName.NUMERIC)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_numeric_one", StandardSQLTypeName.NUMERIC)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_numeric_repeated", StandardSQLTypeName.NUMERIC)
- .setMode(Mode.REPEATED)
- .build())))
- .build();
- bigquery.create(tableInfo);
- }
-
- @AfterClass
- public static void afterClass() {
- if (client != null) {
- client.close();
- }
- if (bigquery != null) {
- RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
- }
- }
-
- @Test
- public void TestBigDecimalEncoding()
- throws IOException, InterruptedException, ExecutionException,
- Descriptors.DescriptorValidationException {
- TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, TABLE);
- TableFieldSchema TEST_NUMERIC_ZERO =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_numeric_zero")
- .build();
- TableFieldSchema TEST_NUMERIC_ONE =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_numeric_one")
- .build();
- TableFieldSchema TEST_NUMERIC_REPEATED =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_numeric_repeated")
- .build();
- TableSchema tableSchema =
- TableSchema.newBuilder()
- .addFields(0, TEST_NUMERIC_ZERO)
- .addFields(1, TEST_NUMERIC_ONE)
- .addFields(2, TEST_NUMERIC_REPEATED)
- .build();
- try (JsonStreamWriter jsonStreamWriter =
- JsonStreamWriter.newBuilder(parent.toString(), tableSchema).build()) {
- JSONObject row = new JSONObject();
- row.put(
- "test_numeric_zero",
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0")));
- row.put(
- "test_numeric_one",
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.2")));
- row.put(
- "test_numeric_repeated",
- new JSONArray(
- new byte[][] {
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("0"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("1.2"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("-1.2"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("99999999999999999999999999999.999999999"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(
- new BigDecimal("-99999999999999999999999999999.999999999"))
- .toByteArray(),
- }));
- JSONArray jsonArr = new JSONArray(new JSONObject[] {row});
- ApiFuture response = jsonStreamWriter.append(jsonArr, -1);
- AppendRowsResponse arr = response.get();
- AppendResult ar = arr.getAppendResult();
- boolean ho = ar.hasOffset();
- TableResult result =
- bigquery.listTableData(
- tableInfo.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator<FieldValueList> iter = result.getValues().iterator();
- FieldValueList currentRow;
- currentRow = iter.next();
- assertEquals("0", currentRow.get(0).getStringValue());
- assertEquals("1.2", currentRow.get(1).getStringValue());
- assertEquals("0", currentRow.get(2).getRepeatedValue().get(0).getStringValue());
- assertEquals("1.2", currentRow.get(2).getRepeatedValue().get(1).getStringValue());
- assertEquals("-1.2", currentRow.get(2).getRepeatedValue().get(2).getStringValue());
- assertEquals(
- "99999999999999999999999999999.999999999",
- currentRow.get(2).getRepeatedValue().get(3).getStringValue());
- assertEquals(
- "-99999999999999999999999999999.999999999",
- currentRow.get(2).getRepeatedValue().get(4).getStringValue());
- }
- }
-}
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
deleted file mode 100644
index 4906de9c64..0000000000
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryTimeEncoderTest.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright 2021 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.google.cloud.bigquery.storage.v1beta2.it;
-
-import static org.junit.Assert.assertEquals;
-
-import com.google.api.core.ApiFuture;
-import com.google.cloud.ServiceOptions;
-import com.google.cloud.bigquery.BigQuery;
-import com.google.cloud.bigquery.DatasetInfo;
-import com.google.cloud.bigquery.Field.Mode;
-import com.google.cloud.bigquery.FieldValueList;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.StandardSQLTypeName;
-import com.google.cloud.bigquery.StandardTableDefinition;
-import com.google.cloud.bigquery.TableId;
-import com.google.cloud.bigquery.TableInfo;
-import com.google.cloud.bigquery.TableResult;
-import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
-import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
-import com.google.cloud.bigquery.storage.v1beta2.CivilTimeEncoder;
-import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
-import com.google.cloud.bigquery.storage.v1beta2.TableFieldSchema;
-import com.google.cloud.bigquery.storage.v1beta2.TableName;
-import com.google.cloud.bigquery.storage.v1beta2.TableSchema;
-import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
-import com.google.protobuf.Descriptors;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.concurrent.ExecutionException;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.threeten.bp.LocalDateTime;
-import org.threeten.bp.LocalTime;
-
-public class ITBigQueryTimeEncoderTest {
- private static final String DATASET = RemoteBigQueryHelper.generateDatasetName();
- private static final String TABLE = "testtable";
- private static final String DESCRIPTION = "BigQuery Write Java manual client test dataset";
-
- private static BigQueryWriteClient client;
- private static TableInfo tableInfo;
- private static BigQuery bigquery;
-
- @BeforeClass
- public static void beforeClass() throws IOException {
- client = BigQueryWriteClient.create();
-
- RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
- bigquery = bigqueryHelper.getOptions().getService();
- DatasetInfo datasetInfo =
- DatasetInfo.newBuilder(/* datasetId = */ DATASET).setDescription(DESCRIPTION).build();
- bigquery.create(datasetInfo);
- tableInfo =
- TableInfo.newBuilder(
- TableId.of(DATASET, TABLE),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder(
- "test_str", StandardSQLTypeName.STRING)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_time_micros", StandardSQLTypeName.TIME)
- .setMode(Mode.REPEATED)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_datetime_micros", StandardSQLTypeName.DATETIME)
- .setMode(Mode.REPEATED)
- .build())))
- .build();
- bigquery.create(tableInfo);
- }
-
- @AfterClass
- public static void afterClass() {
- if (client != null) {
- client.close();
- }
- if (bigquery != null) {
- RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
- }
- }
-
- @Test
- public void TestTimeEncoding()
- throws IOException, InterruptedException, ExecutionException,
- Descriptors.DescriptorValidationException {
- TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, TABLE);
- TableFieldSchema TEST_STRING =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.STRING)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_str")
- .build();
- TableFieldSchema TEST_TIME =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.TIME)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_time_micros")
- .build();
- TableFieldSchema TEST_DATETIME =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.DATETIME)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_datetime_micros")
- .build();
- TableSchema tableSchema =
- TableSchema.newBuilder()
- .addFields(0, TEST_STRING)
- .addFields(1, TEST_TIME)
- .addFields(2, TEST_DATETIME)
- .build();
- try (JsonStreamWriter jsonStreamWriter =
- JsonStreamWriter.newBuilder(parent.toString(), tableSchema).build()) {
- JSONObject row = new JSONObject();
- row.put("test_str", "Start of the day");
- row.put(
- "test_time_micros",
- new JSONArray(
- new long[] {
- CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(13, 14, 15, 16_000_000)),
- CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(23, 59, 59, 999_999_000)),
- CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(0, 0, 0, 0)),
- CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(1, 2, 3, 4_000)),
- CivilTimeEncoder.encodePacked64TimeMicros(LocalTime.of(5, 6, 7, 8_000))
- }));
- row.put(
- "test_datetime_micros",
- new JSONArray(
- new long[] {
- CivilTimeEncoder.encodePacked64DatetimeMicros(
- LocalDateTime.of(1, 1, 1, 12, 0, 0, 0)),
- CivilTimeEncoder.encodePacked64DatetimeMicros(
- LocalDateTime.of(1995, 5, 19, 10, 30, 45, 0)),
- CivilTimeEncoder.encodePacked64DatetimeMicros(
- LocalDateTime.of(2000, 1, 1, 0, 0, 0, 0)),
- CivilTimeEncoder.encodePacked64DatetimeMicros(
- LocalDateTime.of(2026, 3, 11, 5, 45, 12, 9_000_000)),
- CivilTimeEncoder.encodePacked64DatetimeMicros(
- LocalDateTime.of(2050, 1, 2, 3, 4, 5, 6_000)),
- }));
- JSONArray jsonArr = new JSONArray(new JSONObject[] {row});
- ApiFuture response = jsonStreamWriter.append(jsonArr, -1);
- Assert.assertFalse(response.get().getAppendResult().hasOffset());
- TableResult result =
- bigquery.listTableData(
- tableInfo.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator iter = result.getValues().iterator();
- FieldValueList currentRow;
- currentRow = iter.next();
- assertEquals("Start of the day", currentRow.get(0).getValue());
- assertEquals("13:14:15.016000", currentRow.get(1).getRepeatedValue().get(0).getStringValue());
- assertEquals("23:59:59.999999", currentRow.get(1).getRepeatedValue().get(1).getStringValue());
- assertEquals("00:00:00", currentRow.get(1).getRepeatedValue().get(2).getStringValue());
- assertEquals("01:02:03.000004", currentRow.get(1).getRepeatedValue().get(3).getStringValue());
- assertEquals("05:06:07.000008", currentRow.get(1).getRepeatedValue().get(4).getStringValue());
-
- assertEquals(
- "0001-01-01T12:00:00", currentRow.get(2).getRepeatedValue().get(0).getStringValue());
- assertEquals(
- "1995-05-19T10:30:45", currentRow.get(2).getRepeatedValue().get(1).getStringValue());
- assertEquals(
- "2000-01-01T00:00:00", currentRow.get(2).getRepeatedValue().get(2).getStringValue());
- assertEquals(
- "2026-03-11T05:45:12.009000",
- currentRow.get(2).getRepeatedValue().get(3).getStringValue());
- assertEquals(
- "2050-01-02T03:04:05.000006",
- currentRow.get(2).getRepeatedValue().get(4).getStringValue());
- }
- }
-}
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
deleted file mode 100644
index 3c1eeef8fd..0000000000
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/it/ITBigQueryWriteManualClientTest.java
+++ /dev/null
@@ -1,531 +0,0 @@
-/*
- * Copyright 2020 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.google.cloud.bigquery.storage.v1beta2.it;
-
-import static com.google.common.truth.Truth.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import com.google.api.core.ApiFuture;
-import com.google.cloud.ServiceOptions;
-import com.google.cloud.bigquery.*;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.storage.test.Test.*;
-import com.google.cloud.bigquery.storage.v1beta2.*;
-import com.google.cloud.bigquery.testing.RemoteBigQueryHelper;
-import com.google.protobuf.Descriptors;
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.logging.Logger;
-import org.json.JSONArray;
-import org.json.JSONObject;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.threeten.bp.LocalDateTime;
-
-/** Integration tests for BigQuery Write API. */
-public class ITBigQueryWriteManualClientTest {
- private static final Logger LOG =
- Logger.getLogger(ITBigQueryWriteManualClientTest.class.getName());
- private static final String DATASET = RemoteBigQueryHelper.generateDatasetName();
- private static final String DATASET_EU = RemoteBigQueryHelper.generateDatasetName();
- private static final String TABLE = "testtable";
- private static final String TABLE2 = "complicatedtable";
- private static final String DESCRIPTION = "BigQuery Write Java manual client test dataset";
-
- private static BigQueryWriteClient client;
- private static TableInfo tableInfo;
- private static TableInfo tableInfo2;
- private static TableInfo tableInfoEU;
- private static String tableId;
- private static String tableId2;
- private static String tableIdEU;
- private static BigQuery bigquery;
-
- @BeforeClass
- public static void beforeClass() throws IOException {
- client = BigQueryWriteClient.create();
-
- RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
- bigquery = bigqueryHelper.getOptions().getService();
- DatasetInfo datasetInfo =
- DatasetInfo.newBuilder(/* datasetId = */ DATASET).setDescription(DESCRIPTION).build();
- bigquery.create(datasetInfo);
- LOG.info("Created test dataset: " + DATASET);
- tableInfo =
- TableInfo.newBuilder(
- TableId.of(DATASET, TABLE),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder("foo", LegacySQLTypeName.STRING)
- .setMode(Field.Mode.NULLABLE)
- .build())))
- .build();
- com.google.cloud.bigquery.Field.Builder innerTypeFieldBuilder =
- com.google.cloud.bigquery.Field.newBuilder(
- "inner_type",
- LegacySQLTypeName.RECORD,
- com.google.cloud.bigquery.Field.newBuilder("value", LegacySQLTypeName.STRING)
- .setMode(Field.Mode.REPEATED)
- .build());
-
- tableInfo2 =
- TableInfo.newBuilder(
- TableId.of(DATASET, TABLE2),
- StandardTableDefinition.of(
- Schema.of(
- Field.newBuilder(
- "nested_repeated_type",
- LegacySQLTypeName.RECORD,
- innerTypeFieldBuilder.setMode(Field.Mode.REPEATED).build())
- .setMode(Field.Mode.REPEATED)
- .build(),
- innerTypeFieldBuilder.setMode(Field.Mode.NULLABLE).build())))
- .build();
- bigquery.create(tableInfo);
- bigquery.create(tableInfo2);
- tableId =
- String.format(
- "projects/%s/datasets/%s/tables/%s",
- ServiceOptions.getDefaultProjectId(), DATASET, TABLE);
- tableId2 =
- String.format(
- "projects/%s/datasets/%s/tables/%s",
- ServiceOptions.getDefaultProjectId(), DATASET, TABLE2);
- DatasetInfo datasetInfoEU =
- DatasetInfo.newBuilder(/* datasetId = */ DATASET_EU)
- .setLocation("EU")
- .setDescription(DESCRIPTION)
- .build();
- bigquery.create(datasetInfoEU);
- tableInfoEU =
- TableInfo.newBuilder(
- TableId.of(DATASET_EU, TABLE),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder("foo", LegacySQLTypeName.STRING)
- .build())))
- .build();
- tableIdEU =
- String.format(
- "projects/%s/datasets/%s/tables/%s",
- ServiceOptions.getDefaultProjectId(), DATASET_EU, TABLE);
- bigquery.create(tableInfoEU);
- }
-
- @AfterClass
- public static void afterClass() {
- if (client != null) {
- client.close();
- }
-
- if (bigquery != null) {
- RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
- LOG.info("Deleted test dataset: " + DATASET);
- }
- }
-
- ProtoRows CreateProtoRows(String[] messages) {
- ProtoRows.Builder rows = ProtoRows.newBuilder();
- for (String message : messages) {
- FooType foo = FooType.newBuilder().setFoo(message).build();
- rows.addSerializedRows(foo.toByteString());
- }
- return rows.build();
- }
-
- ProtoRows CreateProtoRowsComplex(String[] messages) {
- ProtoRows.Builder rows = ProtoRows.newBuilder();
- for (String message : messages) {
- ComplicateType foo =
- ComplicateType.newBuilder()
- .setInnerType(InnerType.newBuilder().addValue(message).addValue(message).build())
- .build();
- rows.addSerializedRows(foo.toByteString());
- }
- return rows.build();
- }
-
- @Test
- public void testBatchWriteWithCommittedStreamEU()
- throws IOException, InterruptedException, ExecutionException {
- WriteStream writeStream =
- client.createWriteStream(
- CreateWriteStreamRequest.newBuilder()
- .setParent(tableIdEU)
- .setWriteStream(
- WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build())
- .build());
- StreamWriterV2 streamWriter =
- StreamWriterV2.newBuilder(writeStream.getName())
- .setWriterSchema(ProtoSchemaConverter.convert(FooType.getDescriptor()))
- .build();
- LOG.info("Sending one message");
-
- ApiFuture response =
- streamWriter.append(CreateProtoRows(new String[] {"aaa"}), 0);
- assertEquals(0, response.get().getAppendResult().getOffset().getValue());
-
- LOG.info("Sending two more messages");
- ApiFuture response1 =
- streamWriter.append(CreateProtoRows(new String[] {"bbb", "ccc"}), 1);
- ApiFuture response2 =
- streamWriter.append(CreateProtoRows(new String[] {"ddd"}), 3);
- assertEquals(1, response1.get().getAppendResult().getOffset().getValue());
- assertEquals(3, response2.get().getAppendResult().getOffset().getValue());
-
- TableResult result =
- bigquery.listTableData(
- tableInfoEU.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator iter = result.getValues().iterator();
- assertEquals("aaa", iter.next().get(0).getStringValue());
- assertEquals("bbb", iter.next().get(0).getStringValue());
- assertEquals("ccc", iter.next().get(0).getStringValue());
- assertEquals("ddd", iter.next().get(0).getStringValue());
- assertEquals(false, iter.hasNext());
- }
-
- @Test
- public void testJsonStreamWriterCommittedStream()
- throws IOException, InterruptedException, ExecutionException,
- Descriptors.DescriptorValidationException {
- String tableName = "JsonTable";
- TableInfo tableInfo =
- TableInfo.newBuilder(
- TableId.of(DATASET, tableName),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder(
- "test_str", StandardSQLTypeName.STRING)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_numerics", StandardSQLTypeName.NUMERIC)
- .setMode(Field.Mode.REPEATED)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_datetime", StandardSQLTypeName.DATETIME)
- .build())))
- .build();
- bigquery.create(tableInfo);
- TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, tableName);
- WriteStream writeStream =
- client.createWriteStream(
- CreateWriteStreamRequest.newBuilder()
- .setParent(parent.toString())
- .setWriteStream(
- WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build())
- .build());
- try (JsonStreamWriter jsonStreamWriter =
- JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema()).build()) {
- LOG.info("Sending one message");
- JSONObject row1 = new JSONObject();
- row1.put("test_str", "aaa");
- row1.put(
- "test_numerics",
- new JSONArray(
- new byte[][] {
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("123.4"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("-9000000"))
- .toByteArray()
- }));
- row1.put(
- "test_datetime",
- CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.of(2020, 10, 1, 12, 0)));
- JSONArray jsonArr1 = new JSONArray(new JSONObject[] {row1});
-
- ApiFuture response1 = jsonStreamWriter.append(jsonArr1, -1);
-
- assertEquals(0, response1.get().getAppendResult().getOffset().getValue());
-
- JSONObject row2 = new JSONObject();
- row1.put("test_str", "bbb");
- JSONObject row3 = new JSONObject();
- row2.put("test_str", "ccc");
- JSONArray jsonArr2 = new JSONArray();
- jsonArr2.put(row1);
- jsonArr2.put(row2);
-
- JSONObject row4 = new JSONObject();
- row4.put("test_str", "ddd");
- JSONArray jsonArr3 = new JSONArray();
- jsonArr3.put(row4);
-
- LOG.info("Sending two more messages");
- ApiFuture response2 = jsonStreamWriter.append(jsonArr2, -1);
- LOG.info("Sending one more message");
- ApiFuture response3 = jsonStreamWriter.append(jsonArr3, -1);
- assertEquals(1, response2.get().getAppendResult().getOffset().getValue());
- assertEquals(3, response3.get().getAppendResult().getOffset().getValue());
-
- TableResult result =
- bigquery.listTableData(
- tableInfo.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator iter = result.getValues().iterator();
- FieldValueList currentRow = iter.next();
- assertEquals("aaa", currentRow.get(0).getStringValue());
- assertEquals("-9000000", currentRow.get(1).getRepeatedValue().get(1).getStringValue());
- assertEquals("2020-10-01T12:00:00", currentRow.get(2).getStringValue());
- assertEquals("bbb", iter.next().get(0).getStringValue());
- assertEquals("ccc", iter.next().get(0).getStringValue());
- assertEquals("ddd", iter.next().get(0).getStringValue());
- assertEquals(false, iter.hasNext());
- }
- }
-
- @Test
- public void testJsonStreamWriterWithDefaultStream()
- throws IOException, InterruptedException, ExecutionException,
- Descriptors.DescriptorValidationException {
- String tableName = "JsonTableDefaultStream";
- TableFieldSchema TEST_STRING =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.STRING)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_str")
- .build();
- TableFieldSchema TEST_NUMERIC =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.NUMERIC)
- .setMode(TableFieldSchema.Mode.REPEATED)
- .setName("test_numerics")
- .build();
- TableFieldSchema TEST_DATE =
- TableFieldSchema.newBuilder()
- .setType(TableFieldSchema.Type.DATETIME)
- .setMode(TableFieldSchema.Mode.NULLABLE)
- .setName("test_datetime")
- .build();
- TableSchema tableSchema =
- TableSchema.newBuilder()
- .addFields(0, TEST_STRING)
- .addFields(1, TEST_DATE)
- .addFields(2, TEST_NUMERIC)
- .build();
- TableInfo tableInfo =
- TableInfo.newBuilder(
- TableId.of(DATASET, tableName),
- StandardTableDefinition.of(
- Schema.of(
- com.google.cloud.bigquery.Field.newBuilder(
- "test_str", StandardSQLTypeName.STRING)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_numerics", StandardSQLTypeName.NUMERIC)
- .setMode(Field.Mode.REPEATED)
- .build(),
- com.google.cloud.bigquery.Field.newBuilder(
- "test_datetime", StandardSQLTypeName.DATETIME)
- .build())))
- .build();
- bigquery.create(tableInfo);
- TableName parent = TableName.of(ServiceOptions.getDefaultProjectId(), DATASET, tableName);
- try (JsonStreamWriter jsonStreamWriter =
- JsonStreamWriter.newBuilder(parent.toString(), tableSchema).build()) {
- LOG.info("Sending one message");
- JSONObject row1 = new JSONObject();
- row1.put("test_str", "aaa");
- row1.put(
- "test_numerics",
- new JSONArray(
- new byte[][] {
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("123.4"))
- .toByteArray(),
- BigDecimalByteStringEncoder.encodeToNumericByteString(new BigDecimal("-9000000"))
- .toByteArray()
- }));
- row1.put(
- "test_datetime",
- CivilTimeEncoder.encodePacked64DatetimeMicros(LocalDateTime.of(2020, 10, 1, 12, 0)));
- JSONArray jsonArr1 = new JSONArray(new JSONObject[] {row1});
-
- ApiFuture response1 = jsonStreamWriter.append(jsonArr1, -1);
-
- assertEquals(0, response1.get().getAppendResult().getOffset().getValue());
-
- JSONObject row2 = new JSONObject();
- row1.put("test_str", "bbb");
- JSONObject row3 = new JSONObject();
- row2.put("test_str", "ccc");
- JSONArray jsonArr2 = new JSONArray();
- jsonArr2.put(row1);
- jsonArr2.put(row2);
-
- JSONObject row4 = new JSONObject();
- row4.put("test_str", "ddd");
- JSONArray jsonArr3 = new JSONArray();
- jsonArr3.put(row4);
-
- LOG.info("Sending two more messages");
- ApiFuture response2 = jsonStreamWriter.append(jsonArr2, -1);
- LOG.info("Sending one more message");
- ApiFuture response3 = jsonStreamWriter.append(jsonArr3, -1);
- Assert.assertFalse(response2.get().getAppendResult().hasOffset());
- Assert.assertFalse(response3.get().getAppendResult().hasOffset());
-
- TableResult result =
- bigquery.listTableData(
- tableInfo.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator iter = result.getValues().iterator();
- FieldValueList currentRow = iter.next();
- assertEquals("aaa", currentRow.get(0).getStringValue());
- assertEquals("-9000000", currentRow.get(1).getRepeatedValue().get(1).getStringValue());
- assertEquals("2020-10-01T12:00:00", currentRow.get(2).getStringValue());
- assertEquals("bbb", iter.next().get(0).getStringValue());
- assertEquals("ccc", iter.next().get(0).getStringValue());
- assertEquals("ddd", iter.next().get(0).getStringValue());
- assertEquals(false, iter.hasNext());
- }
- }
-
- @Test
- public void testComplicateSchemaWithPendingStream()
- throws IOException, InterruptedException, ExecutionException {
- LOG.info("Create a write stream");
- WriteStream writeStream =
- client.createWriteStream(
- CreateWriteStreamRequest.newBuilder()
- .setParent(tableId2)
- .setWriteStream(WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build())
- .build());
- FinalizeWriteStreamResponse finalizeResponse = FinalizeWriteStreamResponse.getDefaultInstance();
- try (StreamWriterV2 streamWriter =
- StreamWriterV2.newBuilder(writeStream.getName())
- .setWriterSchema(ProtoSchemaConverter.convert(ComplicateType.getDescriptor()))
- .build()) {
- LOG.info("Sending two messages");
- ApiFuture response =
- streamWriter.append(CreateProtoRowsComplex(new String[] {"aaa"}), 0L);
- assertEquals(0, response.get().getAppendResult().getOffset().getValue());
-
- ApiFuture response2 =
- streamWriter.append(CreateProtoRowsComplex(new String[] {"bbb"}), 1L);
- assertEquals(1, response2.get().getAppendResult().getOffset().getValue());
-
- // Nothing showed up since rows are not committed.
- TableResult result =
- bigquery.listTableData(
- tableInfo2.getTableId(), BigQuery.TableDataListOption.startIndex(0L));
- Iterator iter = result.getValues().iterator();
- assertEquals(false, iter.hasNext());
-
- LOG.info("Finalize a write stream");
- finalizeResponse =
- client.finalizeWriteStream(
- FinalizeWriteStreamRequest.newBuilder().setName(writeStream.getName()).build());
-
- ApiFuture response3 =
- streamWriter.append(CreateProtoRows(new String[] {"ccc"}), 2L);
- try {
- response3.get();
- Assert.fail("Append to finalized stream should fail.");
- } catch (Exception expected) {
- LOG.info("Got exception: " + expected.toString());
- }
- }
- assertEquals(2, finalizeResponse.getRowCount());
- LOG.info("Commit a write stream");
- BatchCommitWriteStreamsResponse batchCommitWriteStreamsResponse =
- client.batchCommitWriteStreams(
- BatchCommitWriteStreamsRequest.newBuilder()
- .setParent(tableId2)
- .addWriteStreams(writeStream.getName())
- .build());
- assertEquals(true, batchCommitWriteStreamsResponse.hasCommitTime());
- TableResult queryResult =
- bigquery.query(
- QueryJobConfiguration.newBuilder("SELECT * from " + DATASET + '.' + TABLE2).build());
- Iterator queryIter = queryResult.getValues().iterator();
- assertTrue(queryIter.hasNext());
- assertEquals(
- "[FieldValue{attribute=REPEATED, value=[FieldValue{attribute=PRIMITIVE, value=aaa}, FieldValue{attribute=PRIMITIVE, value=aaa}]}]",
- queryIter.next().get(1).getRepeatedValue().toString());
- assertEquals(
- "[FieldValue{attribute=REPEATED, value=[FieldValue{attribute=PRIMITIVE, value=bbb}, FieldValue{attribute=PRIMITIVE, value=bbb}]}]",
- queryIter.next().get(1).getRepeatedValue().toString());
- assertFalse(queryIter.hasNext());
- }
-
- @Test
- public void testStreamError() throws IOException, InterruptedException, ExecutionException {
- WriteStream writeStream =
- client.createWriteStream(
- CreateWriteStreamRequest.newBuilder()
- .setParent(tableId)
- .setWriteStream(
- WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build())
- .build());
- try (StreamWriterV2 streamWriter =
- StreamWriterV2.newBuilder(writeStream.getName())
- .setWriterSchema(ProtoSchemaConverter.convert(FooType.getDescriptor()))
- .build()) {
- ApiFuture response =
- streamWriter.append(CreateProtoRows(new String[] {"aaa"}), -1L);
- assertEquals(0L, response.get().getAppendResult().getOffset().getValue());
- // Send in a bogus stream name should cause in connection error.
- ApiFuture response2 =
- streamWriter.append(CreateProtoRows(new String[] {"aaa"}), 100L);
- try {
- response2.get();
- Assert.fail("Should fail");
- } catch (ExecutionException e) {
- assertThat(e.getCause().getMessage())
- .contains("OUT_OF_RANGE: The offset is beyond stream, expected offset 1, received 100");
- }
- // We can keep sending requests on the same stream.
- ApiFuture response3 =
- streamWriter.append(CreateProtoRows(new String[] {"aaa"}), -1L);
- assertEquals(1L, response3.get().getAppendResult().getOffset().getValue());
- } finally {
- }
- }
-
- @Test
- public void testStreamReconnect() throws IOException, InterruptedException, ExecutionException {
- WriteStream writeStream =
- client.createWriteStream(
- CreateWriteStreamRequest.newBuilder()
- .setParent(tableId)
- .setWriteStream(
- WriteStream.newBuilder().setType(WriteStream.Type.COMMITTED).build())
- .build());
- try (StreamWriterV2 streamWriter =
- StreamWriterV2.newBuilder(writeStream.getName())
- .setWriterSchema(ProtoSchemaConverter.convert(FooType.getDescriptor()))
- .build()) {
- ApiFuture response =
- streamWriter.append(CreateProtoRows(new String[] {"aaa"}), 0L);
- assertEquals(0L, response.get().getAppendResult().getOffset().getValue());
- }
-
- try (StreamWriterV2 streamWriter =
- StreamWriterV2.newBuilder(writeStream.getName())
- .setWriterSchema(ProtoSchemaConverter.convert(FooType.getDescriptor()))
- .build()) {
- // Currently there is a bug that reconnection must wait 5 seconds to get the real row count.
- Thread.sleep(5000L);
- ApiFuture response =
- streamWriter.append(CreateProtoRows(new String[] {"bbb"}), 1L);
- assertEquals(1L, response.get().getAppendResult().getOffset().getValue());
- }
- }
-}
diff --git a/grpc-google-cloud-bigquerystorage-v1/pom.xml b/grpc-google-cloud-bigquerystorage-v1/pom.xml
index 0c842e3227..66e6813db9 100644
--- a/grpc-google-cloud-bigquerystorage-v1/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 3.8.0
+ 3.8.1
grpc-google-cloud-bigquerystorage-v1
GRPC library for grpc-google-cloud-bigquerystorage-v1
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
diff --git a/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml b/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
index 2ef27c7d25..16a903c09d 100644
--- a/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.180.0
+ 0.180.1
grpc-google-cloud-bigquerystorage-v1beta1
GRPC library for grpc-google-cloud-bigquerystorage-v1beta1
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
diff --git a/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml b/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
index aee9460ce5..33fe0cf433 100644
--- a/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.180.0
+ 0.180.1
grpc-google-cloud-bigquerystorage-v1beta2
GRPC library for grpc-google-cloud-bigquerystorage-v1beta2
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
diff --git a/pom.xml b/pom.xml
index 589de9ec3c..005b72de5e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.google.cloud
google-cloud-bigquerystorage-parent
pom
- 3.8.0
+ 3.8.1
BigQuery Storage Parent
https://github.com/googleapis/java-bigquerystorage
@@ -14,7 +14,7 @@
com.google.cloud
sdk-platform-java-config
- 3.33.0
+ 3.34.0
@@ -83,37 +83,37 @@
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.180.0
+ 0.180.1
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.180.0
+ 0.180.1
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 3.8.0
+ 3.8.1
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.180.0
+ 0.180.1
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.180.0
+ 0.180.1
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 3.8.0
+ 3.8.1
com.google.cloud
google-cloud-bigquerystorage
- 3.8.0
+ 3.8.1
org.json
@@ -123,7 +123,7 @@
io.opentelemetry
opentelemetry-bom
- 1.40.0
+ 1.41.0
pom
import
@@ -138,7 +138,7 @@
com.google.cloud
google-cloud-bigquery
- 2.41.0
+ 2.42.0
test
@@ -273,7 +273,7 @@
org.junit.vintage
junit-vintage-engine
- 5.10.3
+ 5.11.0
test
@@ -287,7 +287,7 @@
org.junit.vintage
junit-vintage-engine
- 5.10.3
+ 5.11.0
diff --git a/proto-google-cloud-bigquerystorage-v1/pom.xml b/proto-google-cloud-bigquerystorage-v1/pom.xml
index 5d55b376ea..0868a60f30 100644
--- a/proto-google-cloud-bigquerystorage-v1/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 3.8.0
+ 3.8.1
proto-google-cloud-bigquerystorage-v1
PROTO library for proto-google-cloud-bigquerystorage-v1
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/pom.xml b/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
index 0bf05e1b3d..afaf9cd822 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.180.0
+ 0.180.1
proto-google-cloud-bigquerystorage-v1beta1
PROTO library for proto-google-cloud-bigquerystorage-v1beta1
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/pom.xml b/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
index f511e2f214..bbfff645d4 100644
--- a/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.180.0
+ 0.180.1
proto-google-cloud-bigquerystorage-v1beta2
PROTO library for proto-google-cloud-bigquerystorage-v1beta2
com.google.cloud
google-cloud-bigquerystorage-parent
- 3.8.0
+ 3.8.1
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index ac45233521..e8caa15025 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -30,14 +30,14 @@
com.google.cloud
google-cloud-bigquerystorage
- 3.7.0
+ 3.8.0
com.google.cloud
google-cloud-bigquery
- 2.41.0
+ 2.42.0
org.apache.avro
diff --git a/samples/pom.xml b/samples/pom.xml
index 03cba56b88..bda0b500a6 100644
--- a/samples/pom.xml
+++ b/samples/pom.xml
@@ -38,7 +38,7 @@
org.apache.maven.plugins
maven-deploy-plugin
- 3.1.2
+ 3.1.3
true
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index a44cfa9788..6f7bbc43ea 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -29,14 +29,14 @@
com.google.cloud
google-cloud-bigquerystorage
- 3.8.0
+ 3.8.1
com.google.cloud
google-cloud-bigquery
- 2.41.0
+ 2.42.0
org.apache.avro
diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml
index 0b83bf42c2..57b1b64118 100644
--- a/samples/snippets/pom.xml
+++ b/samples/snippets/pom.xml
@@ -31,7 +31,7 @@
com.google.cloud
libraries-bom
- 26.43.0
+ 26.44.0
pom
import
@@ -48,7 +48,7 @@
com.google.cloud
google-cloud-bigquery
- 2.41.0
+ 2.42.0
org.apache.avro
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
index 718b5373d6..77edd27a81 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteBufferedStream.java
@@ -78,7 +78,7 @@ public static void writeBufferedStream(String projectId, String datasetName, Str
// Use the JSON stream writer to send records in JSON format.
// For more information about JsonStreamWriter, see:
- // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
+ // https://cloud.google.com/java/docs/reference/google-cloud-bigquerystorage/latest/com.google.cloud.bigquery.storage.v1.JsonStreamWriter
try (JsonStreamWriter writer =
JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
.setRetrySettings(retrySettings)
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java
index e8de163ed5..b9a9bb2479 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WritePendingStream.java
@@ -152,7 +152,7 @@ void initialize(TableName parentTable, BigQueryWriteClient client)
// Use the JSON stream writer to send records in JSON format.
// For more information about JsonStreamWriter, see:
- // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1beta2/JsonStreamWriter.html
+ // https://cloud.google.com/java/docs/reference/google-cloud-bigquerystorage/latest/com.google.cloud.bigquery.storage.v1.JsonStreamWriter
streamWriter =
JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema())
.setRetrySettings(retrySettings)
diff --git a/versions.txt b/versions.txt
index 0d106e629c..1dc44ebd2d 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,10 +1,10 @@
# Format:
# module:released-version:current-version
-google-cloud-bigquerystorage:3.8.0:3.8.0
-grpc-google-cloud-bigquerystorage-v1beta1:0.180.0:0.180.0
-grpc-google-cloud-bigquerystorage-v1beta2:0.180.0:0.180.0
-grpc-google-cloud-bigquerystorage-v1:3.8.0:3.8.0
-proto-google-cloud-bigquerystorage-v1beta1:0.180.0:0.180.0
-proto-google-cloud-bigquerystorage-v1beta2:0.180.0:0.180.0
-proto-google-cloud-bigquerystorage-v1:3.8.0:3.8.0
+google-cloud-bigquerystorage:3.8.1:3.8.1
+grpc-google-cloud-bigquerystorage-v1beta1:0.180.1:0.180.1
+grpc-google-cloud-bigquerystorage-v1beta2:0.180.1:0.180.1
+grpc-google-cloud-bigquerystorage-v1:3.8.1:3.8.1
+proto-google-cloud-bigquerystorage-v1beta1:0.180.1:0.180.1
+proto-google-cloud-bigquerystorage-v1beta2:0.180.1:0.180.1
+proto-google-cloud-bigquerystorage-v1:3.8.1:3.8.1