*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * String parent = "";
+ * ProjectName parent = ProjectName.of("[PROJECT]");
* ReadSession readSession = ReadSession.newBuilder().build();
* int maxStreamCount = 0;
* ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
@@ -173,7 +173,7 @@ public BigQueryReadStub getStub() {
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * String parent = "";
+ * ProjectName parent = ProjectName.of("[PROJECT]");
* ReadSession readSession = ReadSession.newBuilder().build();
* int maxStreamCount = 0;
* ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
@@ -191,6 +191,58 @@ public BigQueryReadStub getStub() {
* Streams must be read starting from offset 0.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
+ public final ReadSession createReadSession(
+ ProjectName parent, ReadSession readSession, int maxStreamCount) {
+ CreateReadSessionRequest request =
+ CreateReadSessionRequest.newBuilder()
+ .setParent(parent == null ? null : parent.toString())
+ .setReadSession(readSession)
+ .setMaxStreamCount(maxStreamCount)
+ .build();
+ return createReadSession(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Creates a new read session. A read session divides the contents of a BigQuery table into one or
+ * more streams, which can then be used to read data from the table. The read session also
+ * specifies properties of the data to be read, such as a list of columns or a push-down filter
+ * describing the rows to be returned.
+ *
+ * <p>A particular row can be read by at most one stream. When the caller has reached the end of
+ * each stream in the session, then all the data in the table has been read.
+ *
+ * <p>Data is assigned to each stream such that roughly the same number of rows can be read from
+ * each stream. Because the server-side unit for assigning data is collections of rows, the API
+ * does not guarantee that each stream will return the same number of rows. Additionally, the
+ * limits are enforced based on the number of pre-filtered rows, so some filters can lead to
+ * lopsided assignments.
+ *
+ * <p>Read sessions automatically expire 24 hours after they are created and do not require manual
+ * clean-up by the caller.
+ *
+ * <p>Sample code:
+ *
+ *
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ * ProjectName parent = ProjectName.of("[PROJECT]");
+ * ReadSession readSession = ReadSession.newBuilder().build();
+ * int maxStreamCount = 0;
+ * ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
+ * }
+ *
+ *
+ * @param parent Required. The request project that owns the session, in the form of
+ * `projects/{project_id}`.
+ * @param readSession Required. Session to be created.
+ * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide
+ * a value of streams so as to produce reasonable throughput. Must be non-negative. The number
+ * of streams may be lower than the requested number, depending on the amount of parallelism that
+ * is reasonable for the table. Error will be returned if the max count is greater than the
+ * current system max limit of 1,000.
+ * Streams must be read starting from offset 0.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
public final ReadSession createReadSession(
String parent, ReadSession readSession, int maxStreamCount) {
CreateReadSessionRequest request =
@@ -225,7 +277,12 @@ public final ReadSession createReadSession(
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder().build();
+ * ProjectName parent = ProjectName.of("[PROJECT]");
+ * ReadSession readSession = ReadSession.newBuilder().build();
+ * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .setReadSession(readSession)
+ * .build();
* ReadSession response = baseBigQueryReadClient.createReadSession(request);
* }
*
@@ -260,7 +317,12 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) {
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder().build();
+ * ProjectName parent = ProjectName.of("[PROJECT]");
+ * ReadSession readSession = ReadSession.newBuilder().build();
+ * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .setReadSession(readSession)
+ * .build();
* ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
* // Do something
* ReadSession response = future.get();
@@ -284,7 +346,10 @@ public final UnaryCallable createReadSess
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * ReadRowsRequest request = ReadRowsRequest.newBuilder().build();
+ * ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ * ReadRowsRequest request = ReadRowsRequest.newBuilder()
+ * .setReadStream(readStream.toString())
+ * .build();
*
* ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
* for (ReadRowsResponse response : stream) {
@@ -314,7 +379,10 @@ public final ServerStreamingCallable readRows
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ * ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
* }
*
@@ -343,7 +411,10 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ * ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
* // Do something
* SplitReadStreamResponse response = future.get();
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java
index 0228be6042..a29e6a13d4 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/package-info.java
@@ -30,7 +30,7 @@
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * String parent = "";
+ * ProjectName parent = ProjectName.of("[PROJECT]");
* ReadSession readSession = ReadSession.newBuilder().build();
* int maxStreamCount = 0;
* ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java
index 12053b177e..3606e6f57b 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClient.java
@@ -46,8 +46,9 @@
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder().build();
- * WriteStream response = bigQueryWriteClient.createWriteStream(request);
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * WriteStream writeStream = WriteStream.newBuilder().build();
+ * WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
* }
*
*
@@ -164,7 +165,65 @@ public BigQueryWriteStub getStub() {
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder().build();
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * WriteStream writeStream = WriteStream.newBuilder().build();
+ * WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
+ * }
+ *
+ *
+ * @param parent Required. Reference to the table to which the stream belongs, in the format of
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
+ * @param writeStream Required. Stream to be created.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final WriteStream createWriteStream(TableName parent, WriteStream writeStream) {
+ CreateWriteStreamRequest request =
+ CreateWriteStreamRequest.newBuilder()
+ .setParent(parent == null ? null : parent.toString())
+ .setWriteStream(writeStream)
+ .build();
+ return createWriteStream(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Creates a write stream to the given table.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * WriteStream writeStream = WriteStream.newBuilder().build();
+ * WriteStream response = bigQueryWriteClient.createWriteStream(parent.toString(), writeStream);
+ * }
+ *
+ *
+ * @param parent Required. Reference to the table to which the stream belongs, in the format of
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
+ * @param writeStream Required. Stream to be created.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final WriteStream createWriteStream(String parent, WriteStream writeStream) {
+ CreateWriteStreamRequest request =
+ CreateWriteStreamRequest.newBuilder().setParent(parent).setWriteStream(writeStream).build();
+ return createWriteStream(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Creates a write stream to the given table.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * WriteStream writeStream = WriteStream.newBuilder().build();
+ * CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .setWriteStream(writeStream)
+ * .build();
* WriteStream response = bigQueryWriteClient.createWriteStream(request);
* }
*
@@ -184,7 +243,12 @@ public final WriteStream createWriteStream(CreateWriteStreamRequest request) {
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder().build();
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * WriteStream writeStream = WriteStream.newBuilder().build();
+ * CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .setWriteStream(writeStream)
+ * .build();
* ApiFuture<WriteStream> future = bigQueryWriteClient.createWriteStreamCallable().futureCall(request);
* // Do something
* WriteStream response = future.get();
@@ -221,7 +285,10 @@ public final UnaryCallable createWriteStr
* BidiStream<AppendRowsRequest, AppendRowsResponse> bidiStream =
* bigQueryWriteClient.appendRowsCallable().call();
*
- * AppendRowsRequest request = AppendRowsRequest.newBuilder().build();
+ * WriteStreamName writeStream = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * AppendRowsRequest request = AppendRowsRequest.newBuilder()
+ * .setWriteStream(writeStream.toString())
+ * .build();
* bidiStream.send(request);
* for (AppendRowsResponse response : bidiStream) {
* // Do something when receive a response
@@ -241,7 +308,55 @@ public final BidiStreamingCallable append
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().build();
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * WriteStream response = bigQueryWriteClient.getWriteStream(name);
+ * }
+ *
+ *
+ * @param name Required. Name of the stream to get, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final WriteStream getWriteStream(WriteStreamName name) {
+ GetWriteStreamRequest request =
+ GetWriteStreamRequest.newBuilder().setName(name == null ? null : name.toString()).build();
+ return getWriteStream(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Gets a write stream.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * WriteStream response = bigQueryWriteClient.getWriteStream(name.toString());
+ * }
+ *
+ *
+ * @param name Required. Name of the stream to get, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final WriteStream getWriteStream(String name) {
+ GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().setName(name).build();
+ return getWriteStream(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Gets a write stream.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* WriteStream response = bigQueryWriteClient.getWriteStream(request);
* }
*
@@ -261,7 +376,10 @@ public final WriteStream getWriteStream(GetWriteStreamRequest request) {
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().build();
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* ApiFuture<WriteStream> future = bigQueryWriteClient.getWriteStreamCallable().futureCall(request);
* // Do something
* WriteStream response = future.get();
@@ -280,7 +398,58 @@ public final UnaryCallable getWriteStreamCal
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder().build();
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name);
+ * }
+ *
+ *
+ * @param name Required. Name of the stream to finalize, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final FinalizeWriteStreamResponse finalizeWriteStream(WriteStreamName name) {
+ FinalizeWriteStreamRequest request =
+ FinalizeWriteStreamRequest.newBuilder()
+ .setName(name == null ? null : name.toString())
+ .build();
+ return finalizeWriteStream(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Finalize a write stream so that no new data can be appended to the stream.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(name.toString());
+ * }
+ *
+ *
+ * @param name Required. Name of the stream to finalize, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final FinalizeWriteStreamResponse finalizeWriteStream(String name) {
+ FinalizeWriteStreamRequest request =
+ FinalizeWriteStreamRequest.newBuilder().setName(name).build();
+ return finalizeWriteStream(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Finalize a write stream so that no new data can be appended to the stream.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* FinalizeWriteStreamResponse response = bigQueryWriteClient.finalizeWriteStream(request);
* }
*
@@ -300,7 +469,10 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder().build();
+ * WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ * FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* ApiFuture<FinalizeWriteStreamResponse> future = bigQueryWriteClient.finalizeWriteStreamCallable().futureCall(request);
* // Do something
* FinalizeWriteStreamResponse response = future.get();
@@ -322,7 +494,64 @@ public final FinalizeWriteStreamResponse finalizeWriteStream(FinalizeWriteStream
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder().build();
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent);
+ * }
+ *
+ *
+ * @param parent Required. Parent table that all the streams should belong to, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(TableName parent) {
+ BatchCommitWriteStreamsRequest request =
+ BatchCommitWriteStreamsRequest.newBuilder()
+ .setParent(parent == null ? null : parent.toString())
+ .build();
+ return batchCommitWriteStreams(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams
+ * must be finalized before commit and cannot be committed multiple times. Once a stream is
+ * committed, data in the stream becomes available for read operations.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(parent.toString());
+ * }
+ *
+ *
+ * @param parent Required. Parent table that all the streams should belong to, in the form of
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
+ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(String parent) {
+ BatchCommitWriteStreamsRequest request =
+ BatchCommitWriteStreamsRequest.newBuilder().setParent(parent).build();
+ return batchCommitWriteStreams(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Atomically commits a group of `PENDING` streams that belong to the same `parent` table. Streams
+ * must be finalized before commit and cannot be committed multiple times. Once a stream is
+ * committed, data in the stream becomes available for read operations.
+ *
+ * Sample code:
+ *
+ *
+ * try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * List<String> writeStreams = new ArrayList<>();
+ * BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .addAllWriteStreams(writeStreams)
+ * .build();
* BatchCommitWriteStreamsResponse response = bigQueryWriteClient.batchCommitWriteStreams(request);
* }
*
@@ -345,7 +574,12 @@ public final BatchCommitWriteStreamsResponse batchCommitWriteStreams(
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder().build();
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * List<String> writeStreams = new ArrayList<>();
+ * BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .addAllWriteStreams(writeStreams)
+ * .build();
* ApiFuture<BatchCommitWriteStreamsResponse> future = bigQueryWriteClient.batchCommitWriteStreamsCallable().futureCall(request);
* // Do something
* BatchCommitWriteStreamsResponse response = future.get();
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriter.java
index b9dd306935..c078fa41e3 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriter.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriter.java
@@ -250,10 +250,15 @@ public ApiFuture append(AppendRowsRequest message) {
* @throws IOException
*/
public void refreshAppend() throws IOException, InterruptedException {
+ LOG.info("Establish a write connection.");
synchronized (this) {
- Preconditions.checkState(!shutdown.get(), "Cannot shut down on a shut-down writer.");
+ if (shutdown.get()) {
+ LOG.warning("Cannot refresh on a already shutdown writer.");
+ return;
+ }
// There could be a moment, stub is not yet initialized.
if (clientStream != null) {
+ LOG.info("Closing the stream");
clientStream.closeSend();
}
messagesBatch.resetAttachSchema();
@@ -348,13 +353,15 @@ private static final class InflightBatch {
final List inflightRequests;
// A list tracks expected offset for each AppendRequest. Used to reconstruct the Response
// future.
- final ArrayList offsetList;
- final long creationTime;
- int attempt;
- long batchSizeBytes;
- long expectedOffset;
- Boolean attachSchema;
- String streamName;
+ private final ArrayList offsetList;
+ private final long creationTime;
+ private int attempt;
+ private long batchSizeBytes;
+ private long expectedOffset;
+ private Boolean attachSchema;
+ private String streamName;
+
+ private final AtomicBoolean failed;
InflightBatch(
List inflightRequests,
@@ -376,6 +383,7 @@ private static final class InflightBatch {
this.batchSizeBytes = batchSizeBytes;
this.attachSchema = attachSchema;
this.streamName = streamName;
+ this.failed = new AtomicBoolean(false);
}
int count() {
@@ -417,6 +425,13 @@ private AppendRowsRequest getMergedRequest() throws IllegalStateException {
}
private void onFailure(Throwable t) {
+ if (failed.getAndSet(true)) {
+ // Error has been set already.
+ LOG.warning("Ignore " + t.toString() + " since error has already been set");
+ return;
+ } else {
+ LOG.fine("Setting " + t.toString() + " on response");
+ }
for (AppendRequestAndFutureResponse request : inflightRequests) {
request.appendResult.setException(t);
}
@@ -838,8 +853,10 @@ public void onError(Throwable t) {
}
inflightBatch.onFailure(t);
try {
- // Establish a new connection.
- streamWriter.refreshAppend();
+ if (!streamWriter.shutdown.get()) {
+ // Establish a new connection.
+ streamWriter.refreshAppend();
+ }
} catch (IOException | InterruptedException e) {
LOG.info("Failed to establish a new connection");
}
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java
index 71d0717909..d5a0a66695 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/package-info.java
@@ -30,8 +30,9 @@
*
*
* try (BigQueryWriteClient bigQueryWriteClient = BigQueryWriteClient.create()) {
- * CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder().build();
- * WriteStream response = bigQueryWriteClient.createWriteStream(request);
+ * TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ * WriteStream writeStream = WriteStream.newBuilder().build();
+ * WriteStream response = bigQueryWriteClient.createWriteStream(parent, writeStream);
* }
*
*
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java
index 4ba8441d0b..c587b00d52 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta1/BaseBigQueryStorageClient.java
@@ -413,7 +413,7 @@ public final BatchCreateReadSessionStreamsResponse batchCreateReadSessionStreams
* }
*
*
- * @param stream Stream to finalize.
+ * @param stream Required. Stream to finalize.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final void finalizeStream(Stream stream) {
@@ -509,7 +509,7 @@ public final UnaryCallable finalizeStreamCallable(
* }
*
*
- * @param originalStream Stream to split.
+ * @param originalStream Required. Stream to split.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final SplitReadStreamResponse splitReadStream(Stream originalStream) {
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java
index a8fd3df469..1554feb7df 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClient.java
@@ -37,7 +37,7 @@
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * String parent = "";
+ * ProjectName parent = ProjectName.of("[PROJECT]");
* ReadSession readSession = ReadSession.newBuilder().build();
* int maxStreamCount = 0;
* ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
@@ -173,7 +173,7 @@ public BigQueryReadStub getStub() {
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * String parent = "";
+ * ProjectName parent = ProjectName.of("[PROJECT]");
* ReadSession readSession = ReadSession.newBuilder().build();
* int maxStreamCount = 0;
* ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
@@ -191,6 +191,58 @@ public BigQueryReadStub getStub() {
* Streams must be read starting from offset 0.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
+ public final ReadSession createReadSession(
+ ProjectName parent, ReadSession readSession, int maxStreamCount) {
+ CreateReadSessionRequest request =
+ CreateReadSessionRequest.newBuilder()
+ .setParent(parent == null ? null : parent.toString())
+ .setReadSession(readSession)
+ .setMaxStreamCount(maxStreamCount)
+ .build();
+ return createReadSession(request);
+ }
+
+ // AUTO-GENERATED DOCUMENTATION AND METHOD
+ /**
+ * Creates a new read session. A read session divides the contents of a BigQuery table into one or
+ * more streams, which can then be used to read data from the table. The read session also
+ * specifies properties of the data to be read, such as a list of columns or a push-down filter
+ * describing the rows to be returned.
+ *
+ * <p>A particular row can be read by at most one stream. When the caller has reached the end of
+ * each stream in the session, then all the data in the table has been read.
+ *
+ * <p>Data is assigned to each stream such that roughly the same number of rows can be read from
+ * each stream. Because the server-side unit for assigning data is collections of rows, the API
+ * does not guarantee that each stream will return the same number of rows. Additionally, the
+ * limits are enforced based on the number of pre-filtered rows, so some filters can lead to
+ * lopsided assignments.
+ *
+ * <p>Read sessions automatically expire 24 hours after they are created and do not require manual
+ * clean-up by the caller.
+ *
+ * <p>Sample code:
+ *
+ *
+ * try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
+ * ProjectName parent = ProjectName.of("[PROJECT]");
+ * ReadSession readSession = ReadSession.newBuilder().build();
+ * int maxStreamCount = 0;
+ * ReadSession response = baseBigQueryReadClient.createReadSession(parent.toString(), readSession, maxStreamCount);
+ * }
+ *
+ *
+ * @param parent Required. The request project that owns the session, in the form of
+ * `projects/{project_id}`.
+ * @param readSession Required. Session to be created.
+ * @param maxStreamCount Max initial number of streams. If unset or zero, the server will provide
+ * a value of streams so as to produce reasonable throughput. Must be non-negative. The number
+ * of streams may be lower than the requested number, depending on the amount of parallelism that
+ * is reasonable for the table. Error will be returned if the max count is greater than the
+ * current system max limit of 1,000.
+ * Streams must be read starting from offset 0.
+ * @throws com.google.api.gax.rpc.ApiException if the remote call fails
+ */
public final ReadSession createReadSession(
String parent, ReadSession readSession, int maxStreamCount) {
CreateReadSessionRequest request =
@@ -225,7 +277,12 @@ public final ReadSession createReadSession(
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder().build();
+ * ProjectName parent = ProjectName.of("[PROJECT]");
+ * ReadSession readSession = ReadSession.newBuilder().build();
+ * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .setReadSession(readSession)
+ * .build();
* ReadSession response = baseBigQueryReadClient.createReadSession(request);
* }
*
@@ -260,7 +317,12 @@ public final ReadSession createReadSession(CreateReadSessionRequest request) {
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder().build();
+ * ProjectName parent = ProjectName.of("[PROJECT]");
+ * ReadSession readSession = ReadSession.newBuilder().build();
+ * CreateReadSessionRequest request = CreateReadSessionRequest.newBuilder()
+ * .setParent(parent.toString())
+ * .setReadSession(readSession)
+ * .build();
* ApiFuture<ReadSession> future = baseBigQueryReadClient.createReadSessionCallable().futureCall(request);
* // Do something
* ReadSession response = future.get();
@@ -284,7 +346,10 @@ public final UnaryCallable createReadSess
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * ReadRowsRequest request = ReadRowsRequest.newBuilder().build();
+ * ReadStreamName readStream = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ * ReadRowsRequest request = ReadRowsRequest.newBuilder()
+ * .setReadStream(readStream.toString())
+ * .build();
*
* ServerStream<ReadRowsResponse> stream = baseBigQueryReadClient.readRowsCallable().call(request);
* for (ReadRowsResponse response : stream) {
@@ -314,7 +379,10 @@ public final ServerStreamingCallable readRows
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ * ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* SplitReadStreamResponse response = baseBigQueryReadClient.splitReadStream(request);
* }
*
@@ -343,7 +411,10 @@ public final SplitReadStreamResponse splitReadStream(SplitReadStreamRequest requ
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ * ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ * SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder()
+ * .setName(name.toString())
+ * .build();
* ApiFuture<SplitReadStreamResponse> future = baseBigQueryReadClient.splitReadStreamCallable().futureCall(request);
* // Do something
* SplitReadStreamResponse response = future.get();
diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java
index a53f25fb50..fade85f7c9 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1beta2/package-info.java
@@ -30,7 +30,7 @@
*
*
* try (BaseBigQueryReadClient baseBigQueryReadClient = BaseBigQueryReadClient.create()) {
- * String parent = "";
+ * ProjectName parent = ProjectName.of("[PROJECT]");
* ReadSession readSession = ReadSession.newBuilder().build();
* int maxStreamCount = 0;
* ReadSession response = baseBigQueryReadClient.createReadSession(parent, readSession, maxStreamCount);
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java
index 9d26c04ab2..1217dca250 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1/BaseBigQueryReadClientTest.java
@@ -81,12 +81,13 @@ public void tearDown() throws Exception {
@Test
@SuppressWarnings("all")
public void createReadSessionTest() {
- String name = "name3373707";
- String table = "table110115790";
- ReadSession expectedResponse = ReadSession.newBuilder().setName(name).setTable(table).build();
+ ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]");
+ TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ ReadSession expectedResponse =
+ ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build();
mockBigQueryRead.addResponse(expectedResponse);
- String parent = "parent-995424086";
+ ProjectName parent = ProjectName.of("[PROJECT]");
ReadSession readSession = ReadSession.newBuilder().build();
int maxStreamCount = 940837515;
@@ -97,7 +98,7 @@ public void createReadSessionTest() {
Assert.assertEquals(1, actualRequests.size());
CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0);
- Assert.assertEquals(parent, actualRequest.getParent());
+ Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(readSession, actualRequest.getReadSession());
Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount());
Assert.assertTrue(
@@ -113,7 +114,7 @@ public void createReadSessionExceptionTest() throws Exception {
mockBigQueryRead.addException(exception);
try {
- String parent = "parent-995424086";
+ ProjectName parent = ProjectName.of("[PROJECT]");
ReadSession readSession = ReadSession.newBuilder().build();
int maxStreamCount = 940837515;
@@ -130,7 +131,10 @@ public void readRowsTest() throws Exception {
long rowCount = 1340416618L;
ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build();
mockBigQueryRead.addResponse(expectedResponse);
- ReadRowsRequest request = ReadRowsRequest.newBuilder().build();
+ ReadStreamName readStream =
+ ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ ReadRowsRequest request =
+ ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build();
MockStreamObserver responseObserver = new MockStreamObserver<>();
@@ -147,7 +151,10 @@ public void readRowsTest() throws Exception {
public void readRowsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockBigQueryRead.addException(exception);
- ReadRowsRequest request = ReadRowsRequest.newBuilder().build();
+ ReadStreamName readStream =
+ ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ ReadRowsRequest request =
+ ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build();
MockStreamObserver responseObserver = new MockStreamObserver<>();
@@ -170,7 +177,9 @@ public void splitReadStreamTest() {
SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build();
mockBigQueryRead.addResponse(expectedResponse);
- SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ SplitReadStreamRequest request =
+ SplitReadStreamRequest.newBuilder().setName(name.toString()).build();
SplitReadStreamResponse actualResponse = client.splitReadStream(request);
Assert.assertEquals(expectedResponse, actualResponse);
@@ -179,6 +188,7 @@ public void splitReadStreamTest() {
Assert.assertEquals(1, actualRequests.size());
SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0);
+ Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -192,7 +202,9 @@ public void splitReadStreamExceptionTest() throws Exception {
mockBigQueryRead.addException(exception);
try {
- SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ SplitReadStreamRequest request =
+ SplitReadStreamRequest.newBuilder().setName(name.toString()).build();
client.splitReadStream(request);
Assert.fail("No exception raised");
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java
index 661d422e74..2c990f039b 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/BigQueryWriteClientTest.java
@@ -91,21 +91,24 @@ public void tearDown() throws Exception {
@Test
@SuppressWarnings("all")
public void createWriteStreamTest() {
- String name = "name3373707";
+ WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
String externalId = "externalId-1153075697";
WriteStream expectedResponse =
- WriteStream.newBuilder().setName(name).setExternalId(externalId).build();
+ WriteStream.newBuilder().setName(name.toString()).setExternalId(externalId).build();
mockBigQueryWrite.addResponse(expectedResponse);
- CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder().build();
+ TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ WriteStream writeStream = WriteStream.newBuilder().build();
- WriteStream actualResponse = client.createWriteStream(request);
+ WriteStream actualResponse = client.createWriteStream(parent, writeStream);
Assert.assertEquals(expectedResponse, actualResponse);
List actualRequests = mockBigQueryWrite.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateWriteStreamRequest actualRequest = (CreateWriteStreamRequest) actualRequests.get(0);
+ Assert.assertEquals(parent, TableName.parse(actualRequest.getParent()));
+ Assert.assertEquals(writeStream, actualRequest.getWriteStream());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -119,9 +122,10 @@ public void createWriteStreamExceptionTest() throws Exception {
mockBigQueryWrite.addException(exception);
try {
- CreateWriteStreamRequest request = CreateWriteStreamRequest.newBuilder().build();
+ TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ WriteStream writeStream = WriteStream.newBuilder().build();
- client.createWriteStream(request);
+ client.createWriteStream(parent, writeStream);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
@@ -134,7 +138,10 @@ public void appendRowsTest() throws Exception {
long offset = 1019779949L;
AppendRowsResponse expectedResponse = AppendRowsResponse.newBuilder().setOffset(offset).build();
mockBigQueryWrite.addResponse(expectedResponse);
- AppendRowsRequest request = AppendRowsRequest.newBuilder().build();
+ WriteStreamName writeStream =
+ WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ AppendRowsRequest request =
+ AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build();
MockStreamObserver responseObserver = new MockStreamObserver<>();
@@ -156,7 +163,10 @@ public void appendRowsTest() throws Exception {
public void appendRowsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockBigQueryWrite.addException(exception);
- AppendRowsRequest request = AppendRowsRequest.newBuilder().build();
+ WriteStreamName writeStream =
+ WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
+ AppendRowsRequest request =
+ AppendRowsRequest.newBuilder().setWriteStream(writeStream.toString()).build();
MockStreamObserver responseObserver = new MockStreamObserver<>();
@@ -180,21 +190,22 @@ public void appendRowsExceptionTest() throws Exception {
@Test
@SuppressWarnings("all")
public void getWriteStreamTest() {
- String name = "name3373707";
+ WriteStreamName name2 = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
String externalId = "externalId-1153075697";
WriteStream expectedResponse =
- WriteStream.newBuilder().setName(name).setExternalId(externalId).build();
+ WriteStream.newBuilder().setName(name2.toString()).setExternalId(externalId).build();
mockBigQueryWrite.addResponse(expectedResponse);
- GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().build();
+ WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
- WriteStream actualResponse = client.getWriteStream(request);
+ WriteStream actualResponse = client.getWriteStream(name);
Assert.assertEquals(expectedResponse, actualResponse);
List actualRequests = mockBigQueryWrite.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetWriteStreamRequest actualRequest = (GetWriteStreamRequest) actualRequests.get(0);
+ Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -208,9 +219,9 @@ public void getWriteStreamExceptionTest() throws Exception {
mockBigQueryWrite.addException(exception);
try {
- GetWriteStreamRequest request = GetWriteStreamRequest.newBuilder().build();
+ WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
- client.getWriteStream(request);
+ client.getWriteStream(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
@@ -225,15 +236,16 @@ public void finalizeWriteStreamTest() {
FinalizeWriteStreamResponse.newBuilder().setRowCount(rowCount).build();
mockBigQueryWrite.addResponse(expectedResponse);
- FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder().build();
+ WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
- FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(request);
+ FinalizeWriteStreamResponse actualResponse = client.finalizeWriteStream(name);
Assert.assertEquals(expectedResponse, actualResponse);
List actualRequests = mockBigQueryWrite.getRequests();
Assert.assertEquals(1, actualRequests.size());
FinalizeWriteStreamRequest actualRequest = (FinalizeWriteStreamRequest) actualRequests.get(0);
+ Assert.assertEquals(name, WriteStreamName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -247,9 +259,9 @@ public void finalizeWriteStreamExceptionTest() throws Exception {
mockBigQueryWrite.addException(exception);
try {
- FinalizeWriteStreamRequest request = FinalizeWriteStreamRequest.newBuilder().build();
+ WriteStreamName name = WriteStreamName.of("[PROJECT]", "[DATASET]", "[TABLE]", "[STREAM]");
- client.finalizeWriteStream(request);
+ client.finalizeWriteStream(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
@@ -263,9 +275,9 @@ public void batchCommitWriteStreamsTest() {
BatchCommitWriteStreamsResponse.newBuilder().build();
mockBigQueryWrite.addResponse(expectedResponse);
- BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder().build();
+ TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(request);
+ BatchCommitWriteStreamsResponse actualResponse = client.batchCommitWriteStreams(parent);
Assert.assertEquals(expectedResponse, actualResponse);
List actualRequests = mockBigQueryWrite.getRequests();
@@ -273,6 +285,7 @@ public void batchCommitWriteStreamsTest() {
BatchCommitWriteStreamsRequest actualRequest =
(BatchCommitWriteStreamsRequest) actualRequests.get(0);
+ Assert.assertEquals(parent, TableName.parse(actualRequest.getParent()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -286,9 +299,9 @@ public void batchCommitWriteStreamsExceptionTest() throws Exception {
mockBigQueryWrite.addException(exception);
try {
- BatchCommitWriteStreamsRequest request = BatchCommitWriteStreamsRequest.newBuilder().build();
+ TableName parent = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
- client.batchCommitWriteStreams(request);
+ client.batchCommitWriteStreams(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriterTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriterTest.java
index 3dba7524d0..7e8babb2dd 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriterTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1alpha2/StreamWriterTest.java
@@ -424,16 +424,28 @@ public void testFlowControlBehaviorException() throws Exception {
.build())
.build())
.build()) {
+ assertEquals(
+ 1L,
+ writer
+ .getBatchingSettings()
+ .getFlowControlSettings()
+ .getMaxOutstandingElementCount()
+ .longValue());
testBigQueryWrite.addResponse(AppendRowsResponse.newBuilder().setOffset(1L).build());
+ testBigQueryWrite.setResponseDelay(Duration.ofSeconds(10));
ApiFuture appendFuture1 = sendTestMessage(writer, new String[] {"A"});
ApiFuture appendFuture2 = sendTestMessage(writer, new String[] {"B"});
+ // Wait is necessary for response to be scheduled before timer is advanced.
+ Thread.sleep(5000L);
+ fakeExecutor.advanceTime(Duration.ofSeconds(10));
try {
appendFuture2.get();
Assert.fail("This should fail");
} catch (Exception e) {
- LOG.info("ControlFlow test exception: " + e.toString());
- assertEquals("The maximum number of batch elements: 1 have been reached.", e.getMessage());
+ assertEquals(
+ "java.util.concurrent.ExecutionException: The maximum number of batch elements: 1 have been reached.",
+ e.toString());
}
assertEquals(1L, appendFuture1.get().getOffset());
}
@@ -500,6 +512,7 @@ public void testStreamReconnectionExceedRetry() throws Exception {
.setMaxAttempts(1)
.build())
.build();
+ assertEquals(1, writer.getRetrySettings().getMaxAttempts());
StatusRuntimeException transientError = new StatusRuntimeException(Status.UNAVAILABLE);
testBigQueryWrite.addException(transientError);
testBigQueryWrite.addException(transientError);
@@ -813,7 +826,7 @@ public void testAwaitTermination() throws Exception {
testBigQueryWrite.addResponse(AppendRowsResponse.newBuilder().build());
ApiFuture appendFuture1 = sendTestMessage(writer, new String[] {"A"});
writer.shutdown();
- assertTrue(writer.awaitTermination(1, TimeUnit.MINUTES));
+ assertTrue(writer.awaitTermination(2, TimeUnit.MINUTES));
}
@Test
diff --git a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java
index 1b9363462f..f91d101a56 100644
--- a/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java
+++ b/google-cloud-bigquerystorage/src/test/java/com/google/cloud/bigquery/storage/v1beta2/BaseBigQueryReadClientTest.java
@@ -81,12 +81,13 @@ public void tearDown() throws Exception {
@Test
@SuppressWarnings("all")
public void createReadSessionTest() {
- String name = "name3373707";
- String table = "table110115790";
- ReadSession expectedResponse = ReadSession.newBuilder().setName(name).setTable(table).build();
+ ReadSessionName name = ReadSessionName.of("[PROJECT]", "[LOCATION]", "[SESSION]");
+ TableName table = TableName.of("[PROJECT]", "[DATASET]", "[TABLE]");
+ ReadSession expectedResponse =
+ ReadSession.newBuilder().setName(name.toString()).setTable(table.toString()).build();
mockBigQueryRead.addResponse(expectedResponse);
- String parent = "parent-995424086";
+ ProjectName parent = ProjectName.of("[PROJECT]");
ReadSession readSession = ReadSession.newBuilder().build();
int maxStreamCount = 940837515;
@@ -97,7 +98,7 @@ public void createReadSessionTest() {
Assert.assertEquals(1, actualRequests.size());
CreateReadSessionRequest actualRequest = (CreateReadSessionRequest) actualRequests.get(0);
- Assert.assertEquals(parent, actualRequest.getParent());
+ Assert.assertEquals(parent, ProjectName.parse(actualRequest.getParent()));
Assert.assertEquals(readSession, actualRequest.getReadSession());
Assert.assertEquals(maxStreamCount, actualRequest.getMaxStreamCount());
Assert.assertTrue(
@@ -113,7 +114,7 @@ public void createReadSessionExceptionTest() throws Exception {
mockBigQueryRead.addException(exception);
try {
- String parent = "parent-995424086";
+ ProjectName parent = ProjectName.of("[PROJECT]");
ReadSession readSession = ReadSession.newBuilder().build();
int maxStreamCount = 940837515;
@@ -130,7 +131,10 @@ public void readRowsTest() throws Exception {
long rowCount = 1340416618L;
ReadRowsResponse expectedResponse = ReadRowsResponse.newBuilder().setRowCount(rowCount).build();
mockBigQueryRead.addResponse(expectedResponse);
- ReadRowsRequest request = ReadRowsRequest.newBuilder().build();
+ ReadStreamName readStream =
+ ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ ReadRowsRequest request =
+ ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build();
MockStreamObserver responseObserver = new MockStreamObserver<>();
@@ -147,7 +151,10 @@ public void readRowsTest() throws Exception {
public void readRowsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(Status.INVALID_ARGUMENT);
mockBigQueryRead.addException(exception);
- ReadRowsRequest request = ReadRowsRequest.newBuilder().build();
+ ReadStreamName readStream =
+ ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ ReadRowsRequest request =
+ ReadRowsRequest.newBuilder().setReadStream(readStream.toString()).build();
MockStreamObserver responseObserver = new MockStreamObserver<>();
@@ -170,7 +177,9 @@ public void splitReadStreamTest() {
SplitReadStreamResponse expectedResponse = SplitReadStreamResponse.newBuilder().build();
mockBigQueryRead.addResponse(expectedResponse);
- SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ SplitReadStreamRequest request =
+ SplitReadStreamRequest.newBuilder().setName(name.toString()).build();
SplitReadStreamResponse actualResponse = client.splitReadStream(request);
Assert.assertEquals(expectedResponse, actualResponse);
@@ -179,6 +188,7 @@ public void splitReadStreamTest() {
Assert.assertEquals(1, actualRequests.size());
SplitReadStreamRequest actualRequest = (SplitReadStreamRequest) actualRequests.get(0);
+ Assert.assertEquals(name, ReadStreamName.parse(actualRequest.getName()));
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
@@ -192,7 +202,9 @@ public void splitReadStreamExceptionTest() throws Exception {
mockBigQueryRead.addException(exception);
try {
- SplitReadStreamRequest request = SplitReadStreamRequest.newBuilder().build();
+ ReadStreamName name = ReadStreamName.of("[PROJECT]", "[LOCATION]", "[SESSION]", "[STREAM]");
+ SplitReadStreamRequest request =
+ SplitReadStreamRequest.newBuilder().setName(name.toString()).build();
client.splitReadStream(request);
Assert.fail("No exception raised");
diff --git a/grpc-google-cloud-bigquerystorage-v1/pom.xml b/grpc-google-cloud-bigquerystorage-v1/pom.xml
index bc8d0f09af..19445b326e 100644
--- a/grpc-google-cloud-bigquerystorage-v1/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 0.96.1
+ 0.96.2
grpc-google-cloud-bigquerystorage-v1
GRPC library for grpc-google-cloud-bigquerystorage-v1
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
diff --git a/grpc-google-cloud-bigquerystorage-v1alpha2/pom.xml b/grpc-google-cloud-bigquerystorage-v1alpha2/pom.xml
index 1aa6035916..b7af680a4a 100644
--- a/grpc-google-cloud-bigquerystorage-v1alpha2/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1alpha2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1alpha2
- 0.96.1
+ 0.96.2
grpc-google-cloud-bigquerystorage-v1alpha2
GRPC library for grpc-google-cloud-bigquerystorage-v1alpha2
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
diff --git a/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml b/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
index 519c01f7c5..8e149633f4 100644
--- a/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1beta1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.96.1
+ 0.96.2
grpc-google-cloud-bigquerystorage-v1beta1
GRPC library for grpc-google-cloud-bigquerystorage-v1beta1
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
diff --git a/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml b/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
index 6cbd47baff..1d179a3d06 100644
--- a/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
+++ b/grpc-google-cloud-bigquerystorage-v1beta2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.96.1
+ 0.96.2
grpc-google-cloud-bigquerystorage-v1beta2
GRPC library for grpc-google-cloud-bigquerystorage-v1beta2
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
diff --git a/pom.xml b/pom.xml
index 0208ba6ba6..970e9082dc 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,7 +4,7 @@
com.google.cloud
google-cloud-bigquerystorage-parent
pom
- 0.131.1-beta
+ 0.131.2-beta
BigQuery Storage Parent
https://github.com/googleapis/java-bigquerystorage
@@ -91,59 +91,59 @@
com.google.cloud
google-cloud-shared-dependencies
- 0.2.0
+ 0.3.0
pom
import
com.google.cloud
google-cloud-bigquery
- 1.111.1
+ 1.113.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1alpha2
- 0.96.1
+ 0.96.2
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.96.1
+ 0.96.2
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.96.1
+ 0.96.2
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 0.96.1
+ 0.96.2
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1alpha2
- 0.96.1
+ 0.96.2
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta1
- 0.96.1
+ 0.96.2
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1beta2
- 0.96.1
+ 0.96.2
com.google.api.grpc
grpc-google-cloud-bigquerystorage-v1
- 0.96.1
+ 0.96.2
com.google.cloud
google-cloud-bigquerystorage
- 0.131.1-beta
+ 0.131.2-beta
com.fasterxml.jackson.core
diff --git a/proto-google-cloud-bigquerystorage-v1/pom.xml b/proto-google-cloud-bigquerystorage-v1/pom.xml
index 6a693a53c4..fe8dd6d0f4 100644
--- a/proto-google-cloud-bigquerystorage-v1/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1
- 0.96.1
+ 0.96.2
proto-google-cloud-bigquerystorage-v1
PROTO library for proto-google-cloud-bigquerystorage-v1
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
@@ -21,6 +21,14 @@
com.google.api.grpc
proto-google-common-protos
+
+ com.google.api
+ api-common
+
+
+ com.google.guava
+ guava
+
diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java
new file mode 100644
index 0000000000..0d8b2c2e12
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ProjectName.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class ProjectName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding("projects/{project}");
+
+ private volatile Map<String, String> fieldValuesMap;
+
+ private final String project;
+
+ public String getProject() {
+ return project;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private ProjectName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ }
+
+ public static ProjectName of(String project) {
+ return newBuilder().setProject(project).build();
+ }
+
+ public static String format(String project) {
+ return newBuilder().setProject(project).build().toString();
+ }
+
+ public static ProjectName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map<String, String> matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "ProjectName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"));
+ }
+
+ public static List<ProjectName> parseList(List<String> formattedStrings) {
+ List<ProjectName> list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List<String> toStringList(List<ProjectName> values) {
+ List<String> list = new ArrayList<>(values.size());
+ for (ProjectName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map<String, String> getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project);
+ }
+
+ /** Builder for ProjectName. */
+ public static class Builder {
+
+ private String project;
+
+ public String getProject() {
+ return project;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(ProjectName projectName) {
+ project = projectName.project;
+ }
+
+ public ProjectName build() {
+ return new ProjectName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof ProjectName) {
+ ProjectName that = (ProjectName) o;
+ return (this.project.equals(that.project));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java
new file mode 100644
index 0000000000..4aa5209ddd
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadSessionName.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class ReadSessionName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding(
+ "projects/{project}/locations/{location}/sessions/{session}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String location;
+ private final String session;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private ReadSessionName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ location = Preconditions.checkNotNull(builder.getLocation());
+ session = Preconditions.checkNotNull(builder.getSession());
+ }
+
+ public static ReadSessionName of(String project, String location, String session) {
+ return newBuilder().setProject(project).setLocation(location).setSession(session).build();
+ }
+
+ public static String format(String project, String location, String session) {
+ return newBuilder()
+ .setProject(project)
+ .setLocation(location)
+ .setSession(session)
+ .build()
+ .toString();
+ }
+
+ public static ReadSessionName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "ReadSessionName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (ReadSessionName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("location", location);
+ fieldMapBuilder.put("session", session);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session);
+ }
+
+ /** Builder for ReadSessionName. */
+ public static class Builder {
+
+ private String project;
+ private String location;
+ private String session;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setLocation(String location) {
+ this.location = location;
+ return this;
+ }
+
+ public Builder setSession(String session) {
+ this.session = session;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(ReadSessionName readSessionName) {
+ project = readSessionName.project;
+ location = readSessionName.location;
+ session = readSessionName.session;
+ }
+
+ public ReadSessionName build() {
+ return new ReadSessionName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof ReadSessionName) {
+ ReadSessionName that = (ReadSessionName) o;
+ return (this.project.equals(that.project))
+ && (this.location.equals(that.location))
+ && (this.session.equals(that.session));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= location.hashCode();
+ h *= 1000003;
+ h ^= session.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java
new file mode 100644
index 0000000000..9c8236c663
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/ReadStreamName.java
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class ReadStreamName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding(
+ "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String location;
+ private final String session;
+ private final String stream;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public String getStream() {
+ return stream;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private ReadStreamName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ location = Preconditions.checkNotNull(builder.getLocation());
+ session = Preconditions.checkNotNull(builder.getSession());
+ stream = Preconditions.checkNotNull(builder.getStream());
+ }
+
+ public static ReadStreamName of(String project, String location, String session, String stream) {
+ return newBuilder()
+ .setProject(project)
+ .setLocation(location)
+ .setSession(session)
+ .setStream(stream)
+ .build();
+ }
+
+ public static String format(String project, String location, String session, String stream) {
+ return newBuilder()
+ .setProject(project)
+ .setLocation(location)
+ .setSession(session)
+ .setStream(stream)
+ .build()
+ .toString();
+ }
+
+ public static ReadStreamName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "ReadStreamName.parse: formattedString not in valid format");
+ return of(
+ matchMap.get("project"),
+ matchMap.get("location"),
+ matchMap.get("session"),
+ matchMap.get("stream"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (ReadStreamName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("location", location);
+ fieldMapBuilder.put("session", session);
+ fieldMapBuilder.put("stream", stream);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate(
+ "project", project, "location", location, "session", session, "stream", stream);
+ }
+
+ /** Builder for ReadStreamName. */
+ public static class Builder {
+
+ private String project;
+ private String location;
+ private String session;
+ private String stream;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public String getStream() {
+ return stream;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setLocation(String location) {
+ this.location = location;
+ return this;
+ }
+
+ public Builder setSession(String session) {
+ this.session = session;
+ return this;
+ }
+
+ public Builder setStream(String stream) {
+ this.stream = stream;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(ReadStreamName readStreamName) {
+ project = readStreamName.project;
+ location = readStreamName.location;
+ session = readStreamName.session;
+ stream = readStreamName.stream;
+ }
+
+ public ReadStreamName build() {
+ return new ReadStreamName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof ReadStreamName) {
+ ReadStreamName that = (ReadStreamName) o;
+ return (this.project.equals(that.project))
+ && (this.location.equals(that.location))
+ && (this.session.equals(that.session))
+ && (this.stream.equals(that.stream));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= location.hashCode();
+ h *= 1000003;
+ h ^= session.hashCode();
+ h *= 1000003;
+ h ^= stream.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java
new file mode 100644
index 0000000000..4478859799
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1/src/main/java/com/google/cloud/bigquery/storage/v1/TableName.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class TableName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String dataset;
+ private final String table;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private TableName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ dataset = Preconditions.checkNotNull(builder.getDataset());
+ table = Preconditions.checkNotNull(builder.getTable());
+ }
+
+ public static TableName of(String project, String dataset, String table) {
+ return newBuilder().setProject(project).setDataset(dataset).setTable(table).build();
+ }
+
+ public static String format(String project, String dataset, String table) {
+ return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString();
+ }
+
+ public static TableName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "TableName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (TableName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("dataset", dataset);
+ fieldMapBuilder.put("table", table);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table);
+ }
+
+ /** Builder for TableName. */
+ public static class Builder {
+
+ private String project;
+ private String dataset;
+ private String table;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setDataset(String dataset) {
+ this.dataset = dataset;
+ return this;
+ }
+
+ public Builder setTable(String table) {
+ this.table = table;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(TableName tableName) {
+ project = tableName.project;
+ dataset = tableName.dataset;
+ table = tableName.table;
+ }
+
+ public TableName build() {
+ return new TableName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof TableName) {
+ TableName that = (TableName) o;
+ return (this.project.equals(that.project))
+ && (this.dataset.equals(that.dataset))
+ && (this.table.equals(that.table));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= dataset.hashCode();
+ h *= 1000003;
+ h ^= table.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/pom.xml b/proto-google-cloud-bigquerystorage-v1alpha2/pom.xml
index cf78fe82d0..5b1e233d5f 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1alpha2
- 0.96.1
+ 0.96.2
proto-google-cloud-bigquerystorage-v1alpha2
PROTO library for proto-google-cloud-bigquerystorage-v1alpha2
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
@@ -21,6 +21,14 @@
com.google.api.grpc
proto-google-common-protos
+
+ com.google.api
+ api-common
+
+
+ com.google.guava
+ guava
+
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/ProtoBufProto.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/ProtoBufProto.java
index c41adbb233..b2d49ae9ca 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/ProtoBufProto.java
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/ProtoBufProto.java
@@ -36,9 +36,8 @@ public interface ProtoSchemaOrBuilder
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -53,9 +52,8 @@ public interface ProtoSchemaOrBuilder
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -70,9 +68,8 @@ public interface ProtoSchemaOrBuilder
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -189,9 +186,8 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -208,9 +204,8 @@ public boolean hasProtoDescriptor() {
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -229,9 +224,8 @@ public com.google.protobuf.DescriptorProtos.DescriptorProto getProtoDescriptor()
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -609,9 +603,8 @@ public Builder mergeFrom(
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -628,9 +621,8 @@ public boolean hasProtoDescriptor() {
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -653,9 +645,8 @@ public com.google.protobuf.DescriptorProtos.DescriptorProto getProtoDescriptor()
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -681,9 +672,8 @@ public Builder setProtoDescriptor(
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -706,9 +696,8 @@ public Builder setProtoDescriptor(
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -738,9 +727,8 @@ public Builder mergeProtoDescriptor(
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -763,9 +751,8 @@ public Builder clearProtoDescriptor() {
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -783,9 +770,8 @@ public Builder clearProtoDescriptor() {
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
@@ -807,9 +793,8 @@ public Builder clearProtoDescriptor() {
*
*
*
- * Message descriptor for the data. The descriptor has to be self contained
- * to include all the nested type definition, excepted for proto buffer well
- * known types
+ * Descriptor for input message. The descriptor has to be self contained,
+ * including all the nested types, excepted for proto buffer well known types
* (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
* and zetasql public protos
* (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Storage.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Storage.java
index 964a819e6a..b0d8882f05 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Storage.java
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Storage.java
@@ -37,10 +37,12 @@ public interface CreateWriteStreamRequestOrBuilder
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -50,10 +52,12 @@ public interface CreateWriteStreamRequestOrBuilder
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -220,10 +224,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -243,10 +249,12 @@ public java.lang.String getParent() {
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -691,10 +699,12 @@ public Builder mergeFrom(
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -714,10 +724,12 @@ public java.lang.String getParent() {
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -737,10 +749,12 @@ public com.google.protobuf.ByteString getParentBytes() {
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The parent to set.
* @return This builder for chaining.
@@ -759,10 +773,12 @@ public Builder setParent(java.lang.String value) {
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return This builder for chaining.
*/
@@ -777,10 +793,12 @@ public Builder clearParent() {
*
*
* Required. Reference to the table to which the stream belongs, in the format
- * of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * of `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
@@ -1075,7 +1093,9 @@ public interface AppendRowsRequestOrBuilder
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The writeStream.
*/
@@ -1089,7 +1109,9 @@ public interface AppendRowsRequestOrBuilder
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for writeStream.
*/
@@ -2484,7 +2506,9 @@ public RowsCase getRowsCase() {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The writeStream.
*/
@@ -2508,7 +2532,9 @@ public java.lang.String getWriteStream() {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for writeStream.
*/
@@ -3059,7 +3085,9 @@ public Builder clearRows() {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The writeStream.
*/
@@ -3083,7 +3111,9 @@ public java.lang.String getWriteStream() {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for writeStream.
*/
@@ -3107,7 +3137,9 @@ public com.google.protobuf.ByteString getWriteStreamBytes() {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The writeStream to set.
* @return This builder for chaining.
@@ -3130,7 +3162,9 @@ public Builder setWriteStream(java.lang.String value) {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return This builder for chaining.
*/
@@ -3149,7 +3183,9 @@ public Builder clearWriteStream() {
* stream name, it must equal to the value provided in the first request.
*
*
- * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string write_stream = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The bytes for writeStream to set.
* @return This builder for chaining.
@@ -4619,10 +4655,12 @@ public interface GetWriteStreamRequestOrBuilder
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The name.
*/
@@ -4632,10 +4670,12 @@ public interface GetWriteStreamRequestOrBuilder
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for name.
*/
@@ -4742,10 +4782,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The name.
*/
@@ -4765,10 +4807,12 @@ public java.lang.String getName() {
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for name.
*/
@@ -5132,10 +5176,12 @@ public Builder mergeFrom(
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The name.
*/
@@ -5155,10 +5201,12 @@ public java.lang.String getName() {
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for name.
*/
@@ -5178,10 +5226,12 @@ public com.google.protobuf.ByteString getNameBytes() {
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The name to set.
* @return This builder for chaining.
@@ -5200,10 +5250,12 @@ public Builder setName(java.lang.String value) {
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return This builder for chaining.
*/
@@ -5218,10 +5270,12 @@ public Builder clearName() {
*
*
* Required. Name of the stream to get, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The bytes for name to set.
* @return This builder for chaining.
@@ -5303,10 +5357,12 @@ public interface BatchCommitWriteStreamsRequestOrBuilder
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -5316,10 +5372,12 @@ public interface BatchCommitWriteStreamsRequestOrBuilder
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -5495,10 +5553,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -5518,10 +5578,12 @@ public java.lang.String getParent() {
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -5989,10 +6051,12 @@ public Builder mergeFrom(
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -6012,10 +6076,12 @@ public java.lang.String getParent() {
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -6035,10 +6101,12 @@ public com.google.protobuf.ByteString getParentBytes() {
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The parent to set.
* @return This builder for chaining.
@@ -6057,10 +6125,12 @@ public Builder setParent(java.lang.String value) {
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return This builder for chaining.
*/
@@ -6075,10 +6145,12 @@ public Builder clearParent() {
*
*
* Required. Parent table that all the streams should belong to, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}`.
*
*
- * string parent = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
@@ -7120,10 +7192,12 @@ public interface FinalizeWriteStreamRequestOrBuilder
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The name.
*/
@@ -7133,10 +7207,12 @@ public interface FinalizeWriteStreamRequestOrBuilder
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for name.
*/
@@ -7244,10 +7320,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The name.
*/
@@ -7267,10 +7345,12 @@ public java.lang.String getName() {
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for name.
*/
@@ -7637,10 +7717,12 @@ public Builder mergeFrom(
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The name.
*/
@@ -7660,10 +7742,12 @@ public java.lang.String getName() {
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for name.
*/
@@ -7683,10 +7767,12 @@ public com.google.protobuf.ByteString getNameBytes() {
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The name to set.
* @return This builder for chaining.
@@ -7705,10 +7791,12 @@ public Builder setName(java.lang.String value) {
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return This builder for chaining.
*/
@@ -7723,10 +7811,12 @@ public Builder clearName() {
*
*
* Required. Name of the stream to finalize, in the form of
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
- * string name = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The bytes for name to set.
* @return This builder for chaining.
@@ -8429,70 +8519,80 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "/storage.proto\022&google.cloud.bigquery.st"
+ "orage.v1alpha2\032\034google/api/annotations.p"
+ "roto\032\027google/api/client.proto\032\037google/ap"
- + "i/field_behavior.proto\0325google/cloud/big"
- + "query/storage/v1alpha2/protobuf.proto\0323g"
- + "oogle/cloud/bigquery/storage/v1alpha2/st"
- + "ream.proto\032\033google/protobuf/empty.proto\032"
- + "\037google/protobuf/timestamp.proto\032\036google"
- + "/protobuf/wrappers.proto\032\027google/rpc/sta"
- + "tus.proto\"\177\n\030CreateWriteStreamRequest\022\023\n"
- + "\006parent\030\001 \001(\tB\003\340A\002\022N\n\014write_stream\030\002 \001(\013"
- + "23.google.cloud.bigquery.storage.v1alpha"
- + "2.WriteStreamB\003\340A\002\"\336\002\n\021AppendRowsRequest"
- + "\022\031\n\014write_stream\030\001 \001(\tB\003\340A\002\0220\n\006offset\030\002 "
- + "\001(\0132\033.google.protobuf.Int64ValueB\003\340A\001\022Y\n"
- + "\nproto_rows\030\004 \001(\0132C.google.cloud.bigquer"
- + "y.storage.v1alpha2.AppendRowsRequest.Pro"
- + "toDataH\000\032\230\001\n\tProtoData\022J\n\rwriter_schema\030"
- + "\001 \001(\01323.google.cloud.bigquery.storage.v1"
- + "alpha2.ProtoSchema\022?\n\004rows\030\002 \001(\01321.googl"
- + "e.cloud.bigquery.storage.v1alpha2.ProtoR"
- + "owsB\006\n\004rows\"W\n\022AppendRowsResponse\022\020\n\006off"
- + "set\030\001 \001(\003H\000\022#\n\005error\030\002 \001(\0132\022.google.rpc."
- + "StatusH\000B\n\n\010response\"*\n\025GetWriteStreamRe"
- + "quest\022\021\n\004name\030\001 \001(\tB\003\340A\002\"Q\n\036BatchCommitW"
- + "riteStreamsRequest\022\023\n\006parent\030\001 \001(\tB\003\340A\002\022"
- + "\032\n\rwrite_streams\030\002 \003(\tB\003\340A\002\"R\n\037BatchComm"
- + "itWriteStreamsResponse\022/\n\013commit_time\030\001 "
- + "\001(\0132\032.google.protobuf.Timestamp\"/\n\032Final"
- + "izeWriteStreamRequest\022\021\n\004name\030\001 \001(\tB\003\340A\002"
- + "\"0\n\033FinalizeWriteStreamResponse\022\021\n\trow_c"
- + "ount\030\001 \001(\0032\201\n\n\rBigQueryWrite\022\310\001\n\021CreateW"
- + "riteStream\022@.google.cloud.bigquery.stora"
- + "ge.v1alpha2.CreateWriteStreamRequest\0323.g"
- + "oogle.cloud.bigquery.storage.v1alpha2.Wr"
- + "iteStream\"<\202\323\344\223\0026\"1/v1alpha2/{parent=pro"
- + "jects/*/datasets/*/tables/*}:\001*\022\325\001\n\nAppe"
- + "ndRows\0229.google.cloud.bigquery.storage.v"
- + "1alpha2.AppendRowsRequest\032:.google.cloud"
- + ".bigquery.storage.v1alpha2.AppendRowsRes"
- + "ponse\"L\202\323\344\223\002F\"A/v1alpha2/{write_stream=p"
- + "rojects/*/datasets/*/tables/*/streams/*}"
- + ":\001*(\0010\001\022\312\001\n\016GetWriteStream\022=.google.clou"
- + "d.bigquery.storage.v1alpha2.GetWriteStre"
- + "amRequest\0323.google.cloud.bigquery.storag"
- + "e.v1alpha2.WriteStream\"D\202\323\344\223\002>\"9/v1alpha"
- + "2/{name=projects/*/datasets/*/tables/*/s"
- + "treams/*}:\001*\022\344\001\n\023FinalizeWriteStream\022B.g"
- + "oogle.cloud.bigquery.storage.v1alpha2.Fi"
- + "nalizeWriteStreamRequest\032C.google.cloud."
- + "bigquery.storage.v1alpha2.FinalizeWriteS"
- + "treamResponse\"D\202\323\344\223\002>\"9/v1alpha2/{name=p"
+ + "i/field_behavior.proto\032\031google/api/resou"
+ + "rce.proto\0325google/cloud/bigquery/storage"
+ + "/v1alpha2/protobuf.proto\0323google/cloud/b"
+ + "igquery/storage/v1alpha2/stream.proto\0322g"
+ + "oogle/cloud/bigquery/storage/v1alpha2/ta"
+ + "ble.proto\032\033google/protobuf/empty.proto\032\037"
+ + "google/protobuf/timestamp.proto\032\036google/"
+ + "protobuf/wrappers.proto\032\027google/rpc/stat"
+ + "us.proto\"\250\001\n\030CreateWriteStreamRequest\022<\n"
+ + "\006parent\030\001 \001(\tB,\340A\002\372A&\n$bigquerystorage.g"
+ + "oogleapis.com/Table\022N\n\014write_stream\030\002 \001("
+ + "\01323.google.cloud.bigquery.storage.v1alph"
+ + "a2.WriteStreamB\003\340A\002\"\215\003\n\021AppendRowsReques"
+ + "t\022H\n\014write_stream\030\001 \001(\tB2\340A\002\372A,\n*bigquer"
+ + "ystorage.googleapis.com/WriteStream\0220\n\006o"
+ + "ffset\030\002 \001(\0132\033.google.protobuf.Int64Value"
+ + "B\003\340A\001\022Y\n\nproto_rows\030\004 \001(\0132C.google.cloud"
+ + ".bigquery.storage.v1alpha2.AppendRowsReq"
+ + "uest.ProtoDataH\000\032\230\001\n\tProtoData\022J\n\rwriter"
+ + "_schema\030\001 \001(\01323.google.cloud.bigquery.st"
+ + "orage.v1alpha2.ProtoSchema\022?\n\004rows\030\002 \001(\013"
+ + "21.google.cloud.bigquery.storage.v1alpha"
+ + "2.ProtoRowsB\006\n\004rows\"W\n\022AppendRowsRespons"
+ + "e\022\020\n\006offset\030\001 \001(\003H\000\022#\n\005error\030\002 \001(\0132\022.goo"
+ + "gle.rpc.StatusH\000B\n\n\010response\"Y\n\025GetWrite"
+ + "StreamRequest\022@\n\004name\030\001 \001(\tB2\340A\002\372A,\n*big"
+ + "querystorage.googleapis.com/WriteStream\""
+ + "z\n\036BatchCommitWriteStreamsRequest\022<\n\006par"
+ + "ent\030\001 \001(\tB,\340A\002\372A&\n$bigquerystorage.googl"
+ + "eapis.com/Table\022\032\n\rwrite_streams\030\002 \003(\tB\003"
+ + "\340A\002\"R\n\037BatchCommitWriteStreamsResponse\022/"
+ + "\n\013commit_time\030\001 \001(\0132\032.google.protobuf.Ti"
+ + "mestamp\"^\n\032FinalizeWriteStreamRequest\022@\n"
+ + "\004name\030\001 \001(\tB2\340A\002\372A,\n*bigquerystorage.goo"
+ + "gleapis.com/WriteStream\"0\n\033FinalizeWrite"
+ + "StreamResponse\022\021\n\trow_count\030\001 \001(\0032\310\n\n\rBi"
+ + "gQueryWrite\022\351\001\n\021CreateWriteStream\022@.goog"
+ + "le.cloud.bigquery.storage.v1alpha2.Creat"
+ + "eWriteStreamRequest\0323.google.cloud.bigqu"
+ + "ery.storage.v1alpha2.WriteStream\"]\202\323\344\223\002A"
+ + "\"1/v1alpha2/{parent=projects/*/datasets/"
+ + "*/tables/*}:\014write_stream\332A\023parent,write"
+ + "_stream\022\344\001\n\nAppendRows\0229.google.cloud.bi"
+ + "gquery.storage.v1alpha2.AppendRowsReques"
+ + "t\032:.google.cloud.bigquery.storage.v1alph"
+ + "a2.AppendRowsResponse\"[\202\323\344\223\002F\"A/v1alpha2"
+ + "/{write_stream=projects/*/datasets/*/tab"
+ + "les/*/streams/*}:\001*\332A\014write_stream(\0010\001\022\321"
+ + "\001\n\016GetWriteStream\022=.google.cloud.bigquer"
+ + "y.storage.v1alpha2.GetWriteStreamRequest"
+ + "\0323.google.cloud.bigquery.storage.v1alpha"
+ + "2.WriteStream\"K\202\323\344\223\002>\"9/v1alpha2/{name=p"
+ "rojects/*/datasets/*/tables/*/streams/*}"
- + ":\001*\022\345\001\n\027BatchCommitWriteStreams\022F.google"
- + ".cloud.bigquery.storage.v1alpha2.BatchCo"
- + "mmitWriteStreamsRequest\032G.google.cloud.b"
- + "igquery.storage.v1alpha2.BatchCommitWrit"
- + "eStreamsResponse\"9\202\323\344\223\0023\0221/v1alpha2/{par"
- + "ent=projects/*/datasets/*/tables/*}\032\260\001\312A"
- + "\036bigquerystorage.googleapis.com\322A\213\001https"
- + "://www.googleapis.com/auth/bigquery,http"
- + "s://www.googleapis.com/auth/bigquery.ins"
- + "ertdata,https://www.googleapis.com/auth/"
- + "cloud-platformB{\n*com.google.cloud.bigqu"
- + "ery.storage.v1alpha2ZMgoogle.golang.org/"
- + "genproto/googleapis/cloud/bigquery/stora"
- + "ge/v1alpha2;storageb\006proto3"
+ + ":\001*\332A\004name\022\353\001\n\023FinalizeWriteStream\022B.goo"
+ + "gle.cloud.bigquery.storage.v1alpha2.Fina"
+ + "lizeWriteStreamRequest\032C.google.cloud.bi"
+ + "gquery.storage.v1alpha2.FinalizeWriteStr"
+ + "eamResponse\"K\202\323\344\223\002>\"9/v1alpha2/{name=pro"
+ + "jects/*/datasets/*/tables/*/streams/*}:\001"
+ + "*\332A\004name\022\356\001\n\027BatchCommitWriteStreams\022F.g"
+ + "oogle.cloud.bigquery.storage.v1alpha2.Ba"
+ + "tchCommitWriteStreamsRequest\032G.google.cl"
+ + "oud.bigquery.storage.v1alpha2.BatchCommi"
+ + "tWriteStreamsResponse\"B\202\323\344\223\0023\0221/v1alpha2"
+ + "/{parent=projects/*/datasets/*/tables/*}"
+ + "\332A\006parent\032\260\001\312A\036bigquerystorage.googleapi"
+ + "s.com\322A\213\001https://www.googleapis.com/auth"
+ + "/bigquery,https://www.googleapis.com/aut"
+ + "h/bigquery.insertdata,https://www.google"
+ + "apis.com/auth/cloud-platformB{\n*com.goog"
+ + "le.cloud.bigquery.storage.v1alpha2ZMgoog"
+ + "le.golang.org/genproto/googleapis/cloud/"
+ + "bigquery/storage/v1alpha2;storageb\006proto"
+ + "3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -8501,8 +8601,10 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
+ com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.bigquery.storage.v1alpha2.ProtoBufProto.getDescriptor(),
com.google.cloud.bigquery.storage.v1alpha2.Stream.getDescriptor(),
+ com.google.cloud.bigquery.storage.v1alpha2.Table.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
com.google.protobuf.WrappersProto.getDescriptor(),
@@ -8587,14 +8689,18 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
+ registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
+ registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
+ com.google.api.ResourceProto.getDescriptor();
com.google.cloud.bigquery.storage.v1alpha2.ProtoBufProto.getDescriptor();
com.google.cloud.bigquery.storage.v1alpha2.Stream.getDescriptor();
+ com.google.cloud.bigquery.storage.v1alpha2.Table.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
com.google.protobuf.WrappersProto.getDescriptor();
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Stream.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Stream.java
index 944a94d456..6019ba199b 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Stream.java
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/Stream.java
@@ -37,7 +37,7 @@ public interface WriteStreamOrBuilder
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -50,7 +50,7 @@ public interface WriteStreamOrBuilder
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -432,6 +432,16 @@ public enum Type implements com.google.protobuf.ProtocolMessageEnum {
* PENDING = 2;
*/
PENDING(2),
+ /**
+ *
+ *
+ *
+ * Data is only visible up to the offset to which it was flushed.
+ *
+ *
+ * BUFFERED = 3;
+ */
+ BUFFERED(3),
UNRECOGNIZED(-1),
;
@@ -466,6 +476,16 @@ public enum Type implements com.google.protobuf.ProtocolMessageEnum {
* PENDING = 2;
*/
public static final int PENDING_VALUE = 2;
+ /**
+ *
+ *
+ *
+ * Data is only visible up to the offset to which it was flushed.
+ *
+ *
+ * BUFFERED = 3;
+ */
+ public static final int BUFFERED_VALUE = 3;
public final int getNumber() {
if (this == UNRECOGNIZED) {
@@ -497,6 +517,8 @@ public static Type forNumber(int value) {
return COMMITTED;
case 2:
return PENDING;
+ case 3:
+ return BUFFERED;
default:
return null;
}
@@ -555,7 +577,7 @@ private Type(int value) {
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -578,7 +600,7 @@ public java.lang.String getName() {
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1292,7 +1314,7 @@ public Builder mergeFrom(
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1315,7 +1337,7 @@ public java.lang.String getName() {
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1338,7 +1360,7 @@ public com.google.protobuf.ByteString getNameBytes() {
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1360,7 +1382,7 @@ public Builder setName(java.lang.String value) {
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -1378,7 +1400,7 @@ public Builder clearName() {
*
*
* Output only. Name of the stream, in the form
- * `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
*
*
* string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
@@ -2314,28 +2336,35 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
"\n3google/cloud/bigquery/storage/v1alpha2"
+ "/stream.proto\022&google.cloud.bigquery.sto"
+ "rage.v1alpha2\032\037google/api/field_behavior"
- + ".proto\0322google/cloud/bigquery/storage/v1"
- + "alpha2/table.proto\032\037google/protobuf/time"
- + "stamp.proto\"\370\002\n\013WriteStream\022\021\n\004name\030\001 \001("
- + "\tB\003\340A\003\022K\n\004type\030\002 \001(\01628.google.cloud.bigq"
- + "uery.storage.v1alpha2.WriteStream.TypeB\003"
- + "\340A\005\0224\n\013create_time\030\003 \001(\0132\032.google.protob"
- + "uf.TimestampB\003\340A\003\0224\n\013commit_time\030\004 \001(\0132\032"
- + ".google.protobuf.TimestampB\003\340A\003\022N\n\014table"
- + "_schema\030\005 \001(\01323.google.cloud.bigquery.st"
- + "orage.v1alpha2.TableSchemaB\003\340A\003\022\023\n\013exter"
- + "nal_id\030\006 \001(\t\"8\n\004Type\022\024\n\020TYPE_UNSPECIFIED"
- + "\020\000\022\r\n\tCOMMITTED\020\001\022\013\n\007PENDING\020\002B{\n*com.go"
- + "ogle.cloud.bigquery.storage.v1alpha2ZMgo"
- + "ogle.golang.org/genproto/googleapis/clou"
- + "d/bigquery/storage/v1alpha2;storageb\006pro"
- + "to3"
+ + ".proto\032\031google/api/resource.proto\0322googl"
+ + "e/cloud/bigquery/storage/v1alpha2/table."
+ + "proto\032\037google/protobuf/timestamp.proto\"\376"
+ + "\003\n\013WriteStream\022\021\n\004name\030\001 \001(\tB\003\340A\003\022K\n\004typ"
+ + "e\030\002 \001(\01628.google.cloud.bigquery.storage."
+ + "v1alpha2.WriteStream.TypeB\003\340A\005\0224\n\013create"
+ + "_time\030\003 \001(\0132\032.google.protobuf.TimestampB"
+ + "\003\340A\003\0224\n\013commit_time\030\004 \001(\0132\032.google.proto"
+ + "buf.TimestampB\003\340A\003\022N\n\014table_schema\030\005 \001(\013"
+ + "23.google.cloud.bigquery.storage.v1alpha"
+ + "2.TableSchemaB\003\340A\003\022\023\n\013external_id\030\006 \001(\t\""
+ + "F\n\004Type\022\024\n\020TYPE_UNSPECIFIED\020\000\022\r\n\tCOMMITT"
+ + "ED\020\001\022\013\n\007PENDING\020\002\022\014\n\010BUFFERED\020\003:v\352As\n*bi"
+ + "gquerystorage.googleapis.com/WriteStream"
+ + "\022Eprojects/{project}/datasets/{dataset}/"
+ + "tables/{table}/streams/{stream}B\332\001\n*com."
+ + "google.cloud.bigquery.storage.v1alpha2ZM"
+ + "google.golang.org/genproto/googleapis/cl"
+ + "oud/bigquery/storage/v1alpha2;storage\352A\\"
+ + "\n$bigquerystorage.googleapis.com/Table\0224"
+ + "projects/{project}/datasets/{dataset}/ta"
+ + "bles/{table}b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
+ com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.bigquery.storage.v1alpha2.Table.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
});
@@ -2350,9 +2379,12 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
+ registry.add(com.google.api.ResourceProto.resource);
+ registry.add(com.google.api.ResourceProto.resourceDefinition);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.FieldBehaviorProto.getDescriptor();
+ com.google.api.ResourceProto.getDescriptor();
com.google.cloud.bigquery.storage.v1alpha2.Table.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
}
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java
new file mode 100644
index 0000000000..47ab519a3a
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/TableName.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1alpha2;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class TableName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String dataset;
+ private final String table;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private TableName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ dataset = Preconditions.checkNotNull(builder.getDataset());
+ table = Preconditions.checkNotNull(builder.getTable());
+ }
+
+ public static TableName of(String project, String dataset, String table) {
+ return newBuilder().setProject(project).setDataset(dataset).setTable(table).build();
+ }
+
+ public static String format(String project, String dataset, String table) {
+ return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString();
+ }
+
+ public static TableName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "TableName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (TableName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("dataset", dataset);
+ fieldMapBuilder.put("table", table);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table);
+ }
+
+ /** Builder for TableName. */
+ public static class Builder {
+
+ private String project;
+ private String dataset;
+ private String table;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setDataset(String dataset) {
+ this.dataset = dataset;
+ return this;
+ }
+
+ public Builder setTable(String table) {
+ this.table = table;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(TableName tableName) {
+ project = tableName.project;
+ dataset = tableName.dataset;
+ table = tableName.table;
+ }
+
+ public TableName build() {
+ return new TableName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof TableName) {
+ TableName that = (TableName) o;
+ return (this.project.equals(that.project))
+ && (this.dataset.equals(that.dataset))
+ && (this.table.equals(that.table));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= dataset.hashCode();
+ h *= 1000003;
+ h ^= table.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java
new file mode 100644
index 0000000000..dbc4bd48a7
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/java/com/google/cloud/bigquery/storage/v1alpha2/WriteStreamName.java
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1alpha2;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class WriteStreamName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding(
+ "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String dataset;
+ private final String table;
+ private final String stream;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public String getStream() {
+ return stream;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private WriteStreamName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ dataset = Preconditions.checkNotNull(builder.getDataset());
+ table = Preconditions.checkNotNull(builder.getTable());
+ stream = Preconditions.checkNotNull(builder.getStream());
+ }
+
+ public static WriteStreamName of(String project, String dataset, String table, String stream) {
+ return newBuilder()
+ .setProject(project)
+ .setDataset(dataset)
+ .setTable(table)
+ .setStream(stream)
+ .build();
+ }
+
+ public static String format(String project, String dataset, String table, String stream) {
+ return newBuilder()
+ .setProject(project)
+ .setDataset(dataset)
+ .setTable(table)
+ .setStream(stream)
+ .build()
+ .toString();
+ }
+
+ public static WriteStreamName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "WriteStreamName.parse: formattedString not in valid format");
+ return of(
+ matchMap.get("project"),
+ matchMap.get("dataset"),
+ matchMap.get("table"),
+ matchMap.get("stream"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (WriteStreamName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("dataset", dataset);
+ fieldMapBuilder.put("table", table);
+ fieldMapBuilder.put("stream", stream);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate(
+ "project", project, "dataset", dataset, "table", table, "stream", stream);
+ }
+
+ /** Builder for WriteStreamName. */
+ public static class Builder {
+
+ private String project;
+ private String dataset;
+ private String table;
+ private String stream;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public String getStream() {
+ return stream;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setDataset(String dataset) {
+ this.dataset = dataset;
+ return this;
+ }
+
+ public Builder setTable(String table) {
+ this.table = table;
+ return this;
+ }
+
+ public Builder setStream(String stream) {
+ this.stream = stream;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(WriteStreamName writeStreamName) {
+ project = writeStreamName.project;
+ dataset = writeStreamName.dataset;
+ table = writeStreamName.table;
+ stream = writeStreamName.stream;
+ }
+
+ public WriteStreamName build() {
+ return new WriteStreamName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof WriteStreamName) {
+ WriteStreamName that = (WriteStreamName) o;
+ return (this.project.equals(that.project))
+ && (this.dataset.equals(that.dataset))
+ && (this.table.equals(that.table))
+ && (this.stream.equals(that.stream));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= dataset.hashCode();
+ h *= 1000003;
+ h ^= table.hashCode();
+ h *= 1000003;
+ h ^= stream.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/protobuf.proto b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/protobuf.proto
index 4f58b0d932..882adf5b42 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/protobuf.proto
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/protobuf.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
@@ -25,9 +24,8 @@ option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
// Protobuf schema is an API presentation the proto buffer schema.
message ProtoSchema {
- // Message descriptor for the data. The descriptor has to be self contained
- // to include all the nested type definition, excepted for proto buffer well
- // known types
+  // Descriptor for input message. The descriptor has to be self contained,
+  // including all the nested types, except for proto buffer well known types
// (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
// and zetasql public protos
// (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/storage.proto b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/storage.proto
index e93306371d..fd13ba3a12 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/storage.proto
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/storage.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
@@ -20,8 +19,10 @@ package google.cloud.bigquery.storage.v1alpha2;
import "google/api/annotations.proto";
import "google/api/client.proto";
import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
import "google/cloud/bigquery/storage/v1alpha2/protobuf.proto";
import "google/cloud/bigquery/storage/v1alpha2/stream.proto";
+import "google/cloud/bigquery/storage/v1alpha2/table.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
@@ -33,8 +34,13 @@ option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
// Request message for `CreateWriteStream`.
message CreateWriteStreamRequest {
// Required. Reference to the table to which the stream belongs, in the format
- // of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
+ // of `projects/{project}/datasets/{dataset}/tables/{table}`.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/Table"
+ }
+ ];
// Required. Stream to be created.
WriteStream write_stream = 2 [(google.api.field_behavior) = REQUIRED];
@@ -53,7 +59,12 @@ message AppendRowsRequest {
// Required. The stream that is the target of the append operation. This value must be
// specified for the initial request. If subsequent requests specify the
// stream name, it must equal to the value provided in the first request.
- string write_stream = 1 [(google.api.field_behavior) = REQUIRED];
+ string write_stream = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
// Optional. If present, the write is only performed if the next append offset is same
// as the provided value. If not present, the write is performed at the
@@ -84,15 +95,25 @@ message AppendRowsResponse {
// Request message for `GetWriteStreamRequest`.
message GetWriteStreamRequest {
// Required. Name of the stream to get, in the form of
- // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
- string name = 1 [(google.api.field_behavior) = REQUIRED];
+ // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
}
// Request message for `BatchCommitWriteStreams`.
message BatchCommitWriteStreamsRequest {
// Required. Parent table that all the streams should belong to, in the form of
- // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
- string parent = 1 [(google.api.field_behavior) = REQUIRED];
+ // `projects/{project}/datasets/{dataset}/tables/{table}`.
+ string parent = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/Table"
+ }
+ ];
// Required. The group of streams that will be committed atomically.
repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED];
@@ -107,8 +128,13 @@ message BatchCommitWriteStreamsResponse {
// Request message for invoking `FinalizeWriteStream`.
message FinalizeWriteStreamRequest {
// Required. Name of the stream to finalize, in the form of
- // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
- string name = 1 [(google.api.field_behavior) = REQUIRED];
+ // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
+ string name = 1 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ }
+ ];
}
// Response message for `FinalizeWriteStream`.
@@ -131,8 +157,9 @@ service BigQueryWrite {
rpc CreateWriteStream(CreateWriteStreamRequest) returns (WriteStream) {
option (google.api.http) = {
post: "/v1alpha2/{parent=projects/*/datasets/*/tables/*}"
- body: "*"
+ body: "write_stream"
};
+ option (google.api.method_signature) = "parent,write_stream";
}
// Appends data to the given stream.
@@ -159,6 +186,7 @@ service BigQueryWrite {
post: "/v1alpha2/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
+ option (google.api.method_signature) = "write_stream";
}
// Gets a write stream.
@@ -167,6 +195,7 @@ service BigQueryWrite {
post: "/v1alpha2/{name=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
+ option (google.api.method_signature) = "name";
}
// Finalize a write stream so that no new data can be appended to the
@@ -176,6 +205,7 @@ service BigQueryWrite {
post: "/v1alpha2/{name=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
+ option (google.api.method_signature) = "name";
}
// Atomically commits a group of `PENDING` streams that belong to the same
@@ -187,5 +217,7 @@ service BigQueryWrite {
option (google.api.http) = {
get: "/v1alpha2/{parent=projects/*/datasets/*/tables/*}"
};
+ option (google.api.method_signature) = "parent";
}
+
}
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/stream.proto b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/stream.proto
index a50822d1ac..0ec0ef81a2 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/stream.proto
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/stream.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,21 +11,30 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
package google.cloud.bigquery.storage.v1alpha2;
import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
import "google/cloud/bigquery/storage/v1alpha2/table.proto";
import "google/protobuf/timestamp.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1alpha2;storage";
option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
+option (google.api.resource_definition) = {
+ type: "bigquerystorage.googleapis.com/Table"
+ pattern: "projects/{project}/datasets/{dataset}/tables/{table}"
+};
// Information about a single stream that gets data inside the storage system.
message WriteStream {
+ option (google.api.resource) = {
+ type: "bigquerystorage.googleapis.com/WriteStream"
+ pattern: "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"
+ };
+
enum Type {
// Unknown type.
TYPE_UNSPECIFIED = 0;
@@ -36,10 +45,13 @@ message WriteStream {
// Data is invisible until the stream is committed.
PENDING = 2;
+
+ // Data is only visible up to the offset to which it was flushed.
+ BUFFERED = 3;
}
// Output only. Name of the stream, in the form
- // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
Type type = 2 [(google.api.field_behavior) = IMMUTABLE];
diff --git a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/table.proto b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/table.proto
index fb1dc4aacd..d4bc017165 100644
--- a/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/table.proto
+++ b/proto-google-cloud-bigquerystorage-v1alpha2/src/main/proto/google/cloud/bigquery/storage/v1alpha2/table.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/pom.xml b/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
index f65681936b..1d35d8ba1c 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1beta1/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta1
- 0.96.1
+ 0.96.2
proto-google-cloud-bigquerystorage-v1beta1
PROTO library for proto-google-cloud-bigquerystorage-v1beta1
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java
index d08601548c..5f813d2e7e 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java
+++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/java/com/google/cloud/bigquery/storage/v1beta1/Storage.java
@@ -5109,7 +5109,9 @@ public interface CreateReadSessionRequestOrBuilder
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -5123,7 +5125,9 @@ public interface CreateReadSessionRequestOrBuilder
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -5511,7 +5515,9 @@ public boolean hasTableReference() {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -5535,7 +5541,9 @@ public java.lang.String getParent() {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -6453,7 +6461,9 @@ public Builder clearTableReference() {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The parent.
*/
@@ -6477,7 +6487,9 @@ public java.lang.String getParent() {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return The bytes for parent.
*/
@@ -6501,7 +6513,9 @@ public com.google.protobuf.ByteString getParentBytes() {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The parent to set.
* @return This builder for chaining.
@@ -6524,7 +6538,9 @@ public Builder setParent(java.lang.String value) {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @return This builder for chaining.
*/
@@ -6543,7 +6559,9 @@ public Builder clearParent() {
* be billed for usage.
*
*
- * string parent = 6 [(.google.api.field_behavior) = REQUIRED];
+ *
+ * string parent = 6 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
+ *
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
@@ -8146,8 +8164,6 @@ public interface StreamStatusOrBuilder
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8160,8 +8176,6 @@ public interface StreamStatusOrBuilder
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8174,8 +8188,6 @@ public interface StreamStatusOrBuilder
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8364,8 +8376,6 @@ public float getFractionConsumed() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8380,8 +8390,6 @@ public boolean hasProgress() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8398,8 +8406,6 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Progress getProgress()
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8949,8 +8955,6 @@ public Builder clearFractionConsumed() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8965,8 +8969,6 @@ public boolean hasProgress() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -8987,8 +8989,6 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Progress getProgress()
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -9011,8 +9011,6 @@ public Builder setProgress(com.google.cloud.bigquery.storage.v1beta1.Storage.Pro
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -9033,8 +9031,6 @@ public Builder setProgress(
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -9062,8 +9058,6 @@ public Builder mergeProgress(
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -9084,8 +9078,6 @@ public Builder clearProgress() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -9101,8 +9093,6 @@ public Builder clearProgress() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -9122,8 +9112,6 @@ public Builder clearProgress() {
*
*
* Represents the progress of the current stream.
- * Note: This value is under development and should not be used. Use
- * `fraction_consumed` instead.
*
*
* .google.cloud.bigquery.storage.v1beta1.Progress progress = 4;
@@ -14604,10 +14592,12 @@ public interface FinalizeStreamRequestOrBuilder
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return Whether the stream field is set.
*/
@@ -14616,10 +14606,12 @@ public interface FinalizeStreamRequestOrBuilder
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return The stream.
*/
@@ -14628,10 +14620,12 @@ public interface FinalizeStreamRequestOrBuilder
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
com.google.cloud.bigquery.storage.v1beta1.Storage.StreamOrBuilder getStreamOrBuilder();
}
@@ -14743,10 +14737,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return Whether the stream field is set.
*/
@@ -14757,10 +14753,12 @@ public boolean hasStream() {
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return The stream.
*/
@@ -14773,10 +14771,12 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream getStream() {
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public com.google.cloud.bigquery.storage.v1beta1.Storage.StreamOrBuilder getStreamOrBuilder() {
return getStream();
@@ -15141,10 +15141,12 @@ public Builder mergeFrom(
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return Whether the stream field is set.
*/
@@ -15155,10 +15157,12 @@ public boolean hasStream() {
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return The stream.
*/
@@ -15175,10 +15179,12 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream getStream() {
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder setStream(com.google.cloud.bigquery.storage.v1beta1.Storage.Stream value) {
if (streamBuilder_ == null) {
@@ -15197,10 +15203,12 @@ public Builder setStream(com.google.cloud.bigquery.storage.v1beta1.Storage.Strea
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder setStream(
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream.Builder builderForValue) {
@@ -15217,10 +15225,12 @@ public Builder setStream(
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder mergeStream(com.google.cloud.bigquery.storage.v1beta1.Storage.Stream value) {
if (streamBuilder_ == null) {
@@ -15243,10 +15253,12 @@ public Builder mergeStream(com.google.cloud.bigquery.storage.v1beta1.Storage.Str
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder clearStream() {
if (streamBuilder_ == null) {
@@ -15263,10 +15275,12 @@ public Builder clearStream() {
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream.Builder getStreamBuilder() {
@@ -15277,10 +15291,12 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream.Builder getStrea
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public com.google.cloud.bigquery.storage.v1beta1.Storage.StreamOrBuilder
getStreamOrBuilder() {
@@ -15296,10 +15312,12 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream.Builder getStrea
*
*
*
- * Stream to finalize.
+ * Required. Stream to finalize.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream stream = 2 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream,
@@ -15383,10 +15401,12 @@ public interface SplitReadStreamRequestOrBuilder
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return Whether the originalStream field is set.
*/
@@ -15395,10 +15415,12 @@ public interface SplitReadStreamRequestOrBuilder
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return The originalStream.
*/
@@ -15407,10 +15429,12 @@ public interface SplitReadStreamRequestOrBuilder
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
com.google.cloud.bigquery.storage.v1beta1.Storage.StreamOrBuilder getOriginalStreamOrBuilder();
@@ -15546,10 +15570,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return Whether the originalStream field is set.
*/
@@ -15560,10 +15586,12 @@ public boolean hasOriginalStream() {
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return The originalStream.
*/
@@ -15576,10 +15604,12 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream getOriginalStrea
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public com.google.cloud.bigquery.storage.v1beta1.Storage.StreamOrBuilder
getOriginalStreamOrBuilder() {
@@ -15989,10 +16019,12 @@ public Builder mergeFrom(
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return Whether the originalStream field is set.
*/
@@ -16003,10 +16035,12 @@ public boolean hasOriginalStream() {
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*
* @return The originalStream.
*/
@@ -16023,10 +16057,12 @@ public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream getOriginalStrea
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder setOriginalStream(
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream value) {
@@ -16046,10 +16082,12 @@ public Builder setOriginalStream(
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder setOriginalStream(
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream.Builder builderForValue) {
@@ -16066,10 +16104,12 @@ public Builder setOriginalStream(
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder mergeOriginalStream(
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream value) {
@@ -16093,10 +16133,12 @@ public Builder mergeOriginalStream(
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public Builder clearOriginalStream() {
if (originalStreamBuilder_ == null) {
@@ -16113,10 +16155,12 @@ public Builder clearOriginalStream() {
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public com.google.cloud.bigquery.storage.v1beta1.Storage.Stream.Builder
getOriginalStreamBuilder() {
@@ -16128,10 +16172,12 @@ public Builder clearOriginalStream() {
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
public com.google.cloud.bigquery.storage.v1beta1.Storage.StreamOrBuilder
getOriginalStreamOrBuilder() {
@@ -16147,10 +16193,12 @@ public Builder clearOriginalStream() {
*
*
*
- * Stream to split.
+ * Required. Stream to split.
*
*
- * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1;
+ *
+ * .google.cloud.bigquery.storage.v1beta1.Stream original_stream = 1 [(.google.api.field_behavior) = REQUIRED];
+ *
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.storage.v1beta1.Storage.Stream,
@@ -17543,94 +17591,95 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
+ "gStrategy:k\352Ah\n*bigquerystorage.googleap"
+ "is.com/ReadSession\022:projects/{project}/l"
+ "ocations/{location}/sessions/{session}B\010"
- + "\n\006schema\"\325\003\n\030CreateReadSessionRequest\022S\n"
+ + "\n\006schema\"\205\004\n\030CreateReadSessionRequest\022S\n"
+ "\017table_reference\030\001 \001(\01325.google.cloud.bi"
+ "gquery.storage.v1beta1.TableReferenceB\003\340"
- + "A\002\022\023\n\006parent\030\006 \001(\tB\003\340A\002\022N\n\017table_modifie"
- + "rs\030\002 \001(\01325.google.cloud.bigquery.storage"
- + ".v1beta1.TableModifiers\022\031\n\021requested_str"
- + "eams\030\003 \001(\005\022M\n\014read_options\030\004 \001(\01327.googl"
- + "e.cloud.bigquery.storage.v1beta1.TableRe"
- + "adOptions\022A\n\006format\030\005 \001(\01621.google.cloud"
- + ".bigquery.storage.v1beta1.DataFormat\022R\n\021"
- + "sharding_strategy\030\007 \001(\01627.google.cloud.b"
- + "igquery.storage.v1beta1.ShardingStrategy"
- + "\"d\n\017ReadRowsRequest\022Q\n\rread_position\030\001 \001"
- + "(\01325.google.cloud.bigquery.storage.v1bet"
- + "a1.StreamPositionB\003\340A\002\"\240\001\n\014StreamStatus\022"
- + "\033\n\023estimated_row_count\030\001 \001(\003\022\031\n\021fraction"
- + "_consumed\030\002 \001(\002\022A\n\010progress\030\004 \001(\0132/.goog"
- + "le.cloud.bigquery.storage.v1beta1.Progre"
- + "ss\022\025\n\ris_splittable\030\003 \001(\010\">\n\010Progress\022\031\n"
- + "\021at_response_start\030\001 \001(\002\022\027\n\017at_response_"
- + "end\030\002 \001(\002\"*\n\016ThrottleStatus\022\030\n\020throttle_"
- + "percent\030\001 \001(\005\"\337\002\n\020ReadRowsResponse\022D\n\tav"
- + "ro_rows\030\003 \001(\0132/.google.cloud.bigquery.st"
- + "orage.v1beta1.AvroRowsH\000\022U\n\022arrow_record"
- + "_batch\030\004 \001(\01327.google.cloud.bigquery.sto"
- + "rage.v1beta1.ArrowRecordBatchH\000\022\021\n\trow_c"
- + "ount\030\006 \001(\003\022C\n\006status\030\002 \001(\01323.google.clou"
- + "d.bigquery.storage.v1beta1.StreamStatus\022"
- + "N\n\017throttle_status\030\005 \001(\01325.google.cloud."
- + "bigquery.storage.v1beta1.ThrottleStatusB"
- + "\006\n\004rows\"\220\001\n$BatchCreateReadSessionStream"
- + "sRequest\022H\n\007session\030\001 \001(\01322.google.cloud"
- + ".bigquery.storage.v1beta1.ReadSessionB\003\340"
- + "A\002\022\036\n\021requested_streams\030\002 \001(\005B\003\340A\002\"g\n%Ba"
- + "tchCreateReadSessionStreamsResponse\022>\n\007s"
- + "treams\030\001 \003(\0132-.google.cloud.bigquery.sto"
- + "rage.v1beta1.Stream\"V\n\025FinalizeStreamReq"
- + "uest\022=\n\006stream\030\002 \001(\0132-.google.cloud.bigq"
- + "uery.storage.v1beta1.Stream\"r\n\026SplitRead"
- + "StreamRequest\022F\n\017original_stream\030\001 \001(\0132-"
- + ".google.cloud.bigquery.storage.v1beta1.S"
- + "tream\022\020\n\010fraction\030\002 \001(\002\"\251\001\n\027SplitReadStr"
- + "eamResponse\022E\n\016primary_stream\030\001 \001(\0132-.go"
- + "ogle.cloud.bigquery.storage.v1beta1.Stre"
- + "am\022G\n\020remainder_stream\030\002 \001(\0132-.google.cl"
- + "oud.bigquery.storage.v1beta1.Stream*>\n\nD"
- + "ataFormat\022\033\n\027DATA_FORMAT_UNSPECIFIED\020\000\022\010"
- + "\n\004AVRO\020\001\022\t\n\005ARROW\020\003*O\n\020ShardingStrategy\022"
- + "!\n\035SHARDING_STRATEGY_UNSPECIFIED\020\000\022\n\n\006LI"
- + "QUID\020\001\022\014\n\010BALANCED\020\0022\353\n\n\017BigQueryStorage"
- + "\022\263\002\n\021CreateReadSession\022?.google.cloud.bi"
- + "gquery.storage.v1beta1.CreateReadSession"
- + "Request\0322.google.cloud.bigquery.storage."
- + "v1beta1.ReadSession\"\250\001\202\323\344\223\002w\"0/v1beta1/{"
- + "table_reference.project_id=projects/*}:\001"
- + "*Z@\";/v1beta1/{table_reference.dataset_i"
- + "d=projects/*/datasets/*}:\001*\332A(table_refe"
- + "rence,parent,requested_streams\022\320\001\n\010ReadR"
- + "ows\0226.google.cloud.bigquery.storage.v1be"
- + "ta1.ReadRowsRequest\0327.google.cloud.bigqu"
- + "ery.storage.v1beta1.ReadRowsResponse\"Q\202\323"
- + "\344\223\002;\0229/v1beta1/{read_position.stream.nam"
- + "e=projects/*/streams/*}\332A\rread_position0"
- + "\001\022\220\002\n\035BatchCreateReadSessionStreams\022K.go"
- + "ogle.cloud.bigquery.storage.v1beta1.Batc"
- + "hCreateReadSessionStreamsRequest\032L.googl"
- + "e.cloud.bigquery.storage.v1beta1.BatchCr"
- + "eateReadSessionStreamsResponse\"T\202\323\344\223\0022\"-"
- + "/v1beta1/{session.name=projects/*/sessio"
- + "ns/*}:\001*\332A\031session,requested_streams\022\247\001\n"
- + "\016FinalizeStream\022<.google.cloud.bigquery."
- + "storage.v1beta1.FinalizeStreamRequest\032\026."
- + "google.protobuf.Empty\"?\202\323\344\223\0020\"+/v1beta1/"
- + "{stream.name=projects/*/streams/*}:\001*\332A\006"
- + "stream\022\340\001\n\017SplitReadStream\022=.google.clou"
- + "d.bigquery.storage.v1beta1.SplitReadStre"
- + "amRequest\032>.google.cloud.bigquery.storag"
- + "e.v1beta1.SplitReadStreamResponse\"N\202\323\344\223\002"
- + "6\0224/v1beta1/{original_stream.name=projec"
- + "ts/*/streams/*}\332A\017original_stream\032\256\001\312A\036b"
- + "igquerystorage.googleapis.com\322A\211\001https:/"
- + "/www.googleapis.com/auth/bigquery,https:"
- + "//www.googleapis.com/auth/bigquery.reado"
- + "nly,https://www.googleapis.com/auth/clou"
- + "d-platformBy\n)com.google.cloud.bigquery."
- + "storage.v1beta1ZLgoogle.golang.org/genpr"
- + "oto/googleapis/cloud/bigquery/storage/v1"
- + "beta1;storageb\006proto3"
+ + "A\002\022C\n\006parent\030\006 \001(\tB3\340A\002\372A-\n+cloudresourc"
+ + "emanager.googleapis.com/Project\022N\n\017table"
+ + "_modifiers\030\002 \001(\01325.google.cloud.bigquery"
+ + ".storage.v1beta1.TableModifiers\022\031\n\021reque"
+ + "sted_streams\030\003 \001(\005\022M\n\014read_options\030\004 \001(\013"
+ + "27.google.cloud.bigquery.storage.v1beta1"
+ + ".TableReadOptions\022A\n\006format\030\005 \001(\01621.goog"
+ + "le.cloud.bigquery.storage.v1beta1.DataFo"
+ + "rmat\022R\n\021sharding_strategy\030\007 \001(\01627.google"
+ + ".cloud.bigquery.storage.v1beta1.Sharding"
+ + "Strategy\"d\n\017ReadRowsRequest\022Q\n\rread_posi"
+ + "tion\030\001 \001(\01325.google.cloud.bigquery.stora"
+ + "ge.v1beta1.StreamPositionB\003\340A\002\"\240\001\n\014Strea"
+ + "mStatus\022\033\n\023estimated_row_count\030\001 \001(\003\022\031\n\021"
+ + "fraction_consumed\030\002 \001(\002\022A\n\010progress\030\004 \001("
+ + "\0132/.google.cloud.bigquery.storage.v1beta"
+ + "1.Progress\022\025\n\ris_splittable\030\003 \001(\010\">\n\010Pro"
+ + "gress\022\031\n\021at_response_start\030\001 \001(\002\022\027\n\017at_r"
+ + "esponse_end\030\002 \001(\002\"*\n\016ThrottleStatus\022\030\n\020t"
+ + "hrottle_percent\030\001 \001(\005\"\337\002\n\020ReadRowsRespon"
+ + "se\022D\n\tavro_rows\030\003 \001(\0132/.google.cloud.big"
+ + "query.storage.v1beta1.AvroRowsH\000\022U\n\022arro"
+ + "w_record_batch\030\004 \001(\01327.google.cloud.bigq"
+ + "uery.storage.v1beta1.ArrowRecordBatchH\000\022"
+ + "\021\n\trow_count\030\006 \001(\003\022C\n\006status\030\002 \001(\01323.goo"
+ + "gle.cloud.bigquery.storage.v1beta1.Strea"
+ + "mStatus\022N\n\017throttle_status\030\005 \001(\01325.googl"
+ + "e.cloud.bigquery.storage.v1beta1.Throttl"
+ + "eStatusB\006\n\004rows\"\220\001\n$BatchCreateReadSessi"
+ + "onStreamsRequest\022H\n\007session\030\001 \001(\01322.goog"
+ + "le.cloud.bigquery.storage.v1beta1.ReadSe"
+ + "ssionB\003\340A\002\022\036\n\021requested_streams\030\002 \001(\005B\003\340"
+ + "A\002\"g\n%BatchCreateReadSessionStreamsRespo"
+ + "nse\022>\n\007streams\030\001 \003(\0132-.google.cloud.bigq"
+ + "uery.storage.v1beta1.Stream\"[\n\025FinalizeS"
+ + "treamRequest\022B\n\006stream\030\002 \001(\0132-.google.cl"
+ + "oud.bigquery.storage.v1beta1.StreamB\003\340A\002"
+ + "\"w\n\026SplitReadStreamRequest\022K\n\017original_s"
+ + "tream\030\001 \001(\0132-.google.cloud.bigquery.stor"
+ + "age.v1beta1.StreamB\003\340A\002\022\020\n\010fraction\030\002 \001("
+ + "\002\"\251\001\n\027SplitReadStreamResponse\022E\n\016primary"
+ + "_stream\030\001 \001(\0132-.google.cloud.bigquery.st"
+ + "orage.v1beta1.Stream\022G\n\020remainder_stream"
+ + "\030\002 \001(\0132-.google.cloud.bigquery.storage.v"
+ + "1beta1.Stream*>\n\nDataFormat\022\033\n\027DATA_FORM"
+ + "AT_UNSPECIFIED\020\000\022\010\n\004AVRO\020\001\022\t\n\005ARROW\020\003*O\n"
+ + "\020ShardingStrategy\022!\n\035SHARDING_STRATEGY_U"
+ + "NSPECIFIED\020\000\022\n\n\006LIQUID\020\001\022\014\n\010BALANCED\020\0022\353"
+ + "\n\n\017BigQueryStorage\022\263\002\n\021CreateReadSession"
+ + "\022?.google.cloud.bigquery.storage.v1beta1"
+ + ".CreateReadSessionRequest\0322.google.cloud"
+ + ".bigquery.storage.v1beta1.ReadSession\"\250\001"
+ + "\202\323\344\223\002w\"0/v1beta1/{table_reference.projec"
+ + "t_id=projects/*}:\001*Z@\";/v1beta1/{table_r"
+ + "eference.dataset_id=projects/*/datasets/"
+ + "*}:\001*\332A(table_reference,parent,requested"
+ + "_streams\022\320\001\n\010ReadRows\0226.google.cloud.big"
+ + "query.storage.v1beta1.ReadRowsRequest\0327."
+ + "google.cloud.bigquery.storage.v1beta1.Re"
+ + "adRowsResponse\"Q\202\323\344\223\002;\0229/v1beta1/{read_p"
+ + "osition.stream.name=projects/*/streams/*"
+ + "}\332A\rread_position0\001\022\220\002\n\035BatchCreateReadS"
+ + "essionStreams\022K.google.cloud.bigquery.st"
+ + "orage.v1beta1.BatchCreateReadSessionStre"
+ + "amsRequest\032L.google.cloud.bigquery.stora"
+ + "ge.v1beta1.BatchCreateReadSessionStreams"
+ + "Response\"T\202\323\344\223\0022\"-/v1beta1/{session.name"
+ + "=projects/*/sessions/*}:\001*\332A\031session,req"
+ + "uested_streams\022\247\001\n\016FinalizeStream\022<.goog"
+ + "le.cloud.bigquery.storage.v1beta1.Finali"
+ + "zeStreamRequest\032\026.google.protobuf.Empty\""
+ + "?\202\323\344\223\0020\"+/v1beta1/{stream.name=projects/"
+ + "*/streams/*}:\001*\332A\006stream\022\340\001\n\017SplitReadSt"
+ + "ream\022=.google.cloud.bigquery.storage.v1b"
+ + "eta1.SplitReadStreamRequest\032>.google.clo"
+ + "ud.bigquery.storage.v1beta1.SplitReadStr"
+ + "eamResponse\"N\202\323\344\223\0026\0224/v1beta1/{original_"
+ + "stream.name=projects/*/streams/*}\332A\017orig"
+ + "inal_stream\032\256\001\312A\036bigquerystorage.googlea"
+ + "pis.com\322A\211\001https://www.googleapis.com/au"
+ + "th/bigquery,https://www.googleapis.com/a"
+ + "uth/bigquery.readonly,https://www.google"
+ + "apis.com/auth/cloud-platformBy\n)com.goog"
+ + "le.cloud.bigquery.storage.v1beta1ZLgoogl"
+ + "e.golang.org/genproto/googleapis/cloud/b"
+ + "igquery/storage/v1beta1;storageb\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
@@ -17781,6 +17830,7 @@ public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resource);
+ registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto
index 3003de444c..f70c61c724 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto
+++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/arrow.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto
index 021d8e44f9..7d034a28a7 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto
+++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/avro.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/read_options.proto b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/read_options.proto
index 8ed9b73f6c..1ff8d8b5eb 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/read_options.proto
+++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/read_options.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/storage.proto b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/storage.proto
index 22f742fbb6..81e77c73af 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/storage.proto
+++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/storage.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
@@ -204,7 +203,12 @@ message CreateReadSessionRequest {
// Required. String of the form `projects/{project_id}` indicating the
// project this ReadSession is associated with. This is the project that will
// be billed for usage.
- string parent = 6 [(google.api.field_behavior) = REQUIRED];
+ string parent = 6 [
+ (google.api.field_behavior) = REQUIRED,
+ (google.api.resource_reference) = {
+ type: "cloudresourcemanager.googleapis.com/Project"
+ }
+ ];
// Any modifiers to the Table (e.g. snapshot timestamp).
TableModifiers table_modifiers = 2;
@@ -286,9 +290,6 @@ message StreamStatus {
float fraction_consumed = 2;
// Represents the progress of the current stream.
- //
- // Note: This value is under development and should not be used. Use
- // `fraction_consumed` instead.
Progress progress = 4;
// Whether this stream can be split. For sessions that use the LIQUID sharding
@@ -373,14 +374,14 @@ message BatchCreateReadSessionStreamsResponse {
// Request information for invoking `FinalizeStream`.
message FinalizeStreamRequest {
- // Stream to finalize.
- Stream stream = 2;
+ // Required. Stream to finalize.
+ Stream stream = 2 [(google.api.field_behavior) = REQUIRED];
}
// Request information for `SplitReadStream`.
message SplitReadStreamRequest {
- // Stream to split.
- Stream original_stream = 1;
+ // Required. Stream to split.
+ Stream original_stream = 1 [(google.api.field_behavior) = REQUIRED];
// A value in the range (0.0, 1.0) that specifies the fractional point at
// which the original stream should be split. The actual split point is
diff --git a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/table_reference.proto b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/table_reference.proto
index a55dc48eb0..4269392f67 100644
--- a/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/table_reference.proto
+++ b/proto-google-cloud-bigquerystorage-v1beta1/src/main/proto/google/cloud/bigquery/storage/v1beta1/table_reference.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/pom.xml b/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
index ebbfa155ec..0f207e9262 100644
--- a/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
+++ b/proto-google-cloud-bigquerystorage-v1beta2/pom.xml
@@ -4,13 +4,13 @@
4.0.0
com.google.api.grpc
proto-google-cloud-bigquerystorage-v1beta2
- 0.96.1
+ 0.96.2
proto-google-cloud-bigquerystorage-v1beta2
PROTO library for proto-google-cloud-bigquerystorage-v1beta2
com.google.cloud
google-cloud-bigquerystorage-parent
- 0.131.1-beta
+ 0.131.2-beta
@@ -20,6 +20,14 @@
com.google.api.grpc
proto-google-common-protos
+
+
+ com.google.api
+ api-common
+
+
+ com.google.guava
+ guava
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java
new file mode 100644
index 0000000000..3ca1b0df14
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ProjectName.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1beta2;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class ProjectName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding("projects/{project}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+
+ public String getProject() {
+ return project;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private ProjectName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ }
+
+ public static ProjectName of(String project) {
+ return newBuilder().setProject(project).build();
+ }
+
+ public static String format(String project) {
+ return newBuilder().setProject(project).build().toString();
+ }
+
+ public static ProjectName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "ProjectName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (ProjectName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project);
+ }
+
+ /** Builder for ProjectName. */
+ public static class Builder {
+
+ private String project;
+
+ public String getProject() {
+ return project;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(ProjectName projectName) {
+ project = projectName.project;
+ }
+
+ public ProjectName build() {
+ return new ProjectName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof ProjectName) {
+ ProjectName that = (ProjectName) o;
+ return (this.project.equals(that.project));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java
new file mode 100644
index 0000000000..46c8731d94
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadSessionName.java
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1beta2;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class ReadSessionName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding(
+ "projects/{project}/locations/{location}/sessions/{session}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String location;
+ private final String session;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private ReadSessionName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ location = Preconditions.checkNotNull(builder.getLocation());
+ session = Preconditions.checkNotNull(builder.getSession());
+ }
+
+ public static ReadSessionName of(String project, String location, String session) {
+ return newBuilder().setProject(project).setLocation(location).setSession(session).build();
+ }
+
+ public static String format(String project, String location, String session) {
+ return newBuilder()
+ .setProject(project)
+ .setLocation(location)
+ .setSession(session)
+ .build()
+ .toString();
+ }
+
+ public static ReadSessionName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "ReadSessionName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"), matchMap.get("location"), matchMap.get("session"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (ReadSessionName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("location", location);
+ fieldMapBuilder.put("session", session);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project, "location", location, "session", session);
+ }
+
+ /** Builder for ReadSessionName. */
+ public static class Builder {
+
+ private String project;
+ private String location;
+ private String session;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setLocation(String location) {
+ this.location = location;
+ return this;
+ }
+
+ public Builder setSession(String session) {
+ this.session = session;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(ReadSessionName readSessionName) {
+ project = readSessionName.project;
+ location = readSessionName.location;
+ session = readSessionName.session;
+ }
+
+ public ReadSessionName build() {
+ return new ReadSessionName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof ReadSessionName) {
+ ReadSessionName that = (ReadSessionName) o;
+ return (this.project.equals(that.project))
+ && (this.location.equals(that.location))
+ && (this.session.equals(that.session));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= location.hashCode();
+ h *= 1000003;
+ h ^= session.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java
new file mode 100644
index 0000000000..cd616ebba7
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/ReadStreamName.java
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1beta2;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class ReadStreamName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding(
+ "projects/{project}/locations/{location}/sessions/{session}/streams/{stream}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String location;
+ private final String session;
+ private final String stream;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public String getStream() {
+ return stream;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private ReadStreamName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ location = Preconditions.checkNotNull(builder.getLocation());
+ session = Preconditions.checkNotNull(builder.getSession());
+ stream = Preconditions.checkNotNull(builder.getStream());
+ }
+
+ public static ReadStreamName of(String project, String location, String session, String stream) {
+ return newBuilder()
+ .setProject(project)
+ .setLocation(location)
+ .setSession(session)
+ .setStream(stream)
+ .build();
+ }
+
+ public static String format(String project, String location, String session, String stream) {
+ return newBuilder()
+ .setProject(project)
+ .setLocation(location)
+ .setSession(session)
+ .setStream(stream)
+ .build()
+ .toString();
+ }
+
+ public static ReadStreamName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "ReadStreamName.parse: formattedString not in valid format");
+ return of(
+ matchMap.get("project"),
+ matchMap.get("location"),
+ matchMap.get("session"),
+ matchMap.get("stream"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (ReadStreamName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("location", location);
+ fieldMapBuilder.put("session", session);
+ fieldMapBuilder.put("stream", stream);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate(
+ "project", project, "location", location, "session", session, "stream", stream);
+ }
+
+ /** Builder for ReadStreamName. */
+ public static class Builder {
+
+ private String project;
+ private String location;
+ private String session;
+ private String stream;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getLocation() {
+ return location;
+ }
+
+ public String getSession() {
+ return session;
+ }
+
+ public String getStream() {
+ return stream;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setLocation(String location) {
+ this.location = location;
+ return this;
+ }
+
+ public Builder setSession(String session) {
+ this.session = session;
+ return this;
+ }
+
+ public Builder setStream(String stream) {
+ this.stream = stream;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(ReadStreamName readStreamName) {
+ project = readStreamName.project;
+ location = readStreamName.location;
+ session = readStreamName.session;
+ stream = readStreamName.stream;
+ }
+
+ public ReadStreamName build() {
+ return new ReadStreamName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof ReadStreamName) {
+ ReadStreamName that = (ReadStreamName) o;
+ return (this.project.equals(that.project))
+ && (this.location.equals(that.location))
+ && (this.session.equals(that.session))
+ && (this.stream.equals(that.stream));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= location.hashCode();
+ h *= 1000003;
+ h ^= session.hashCode();
+ h *= 1000003;
+ h ^= stream.hashCode();
+ return h;
+ }
+}
diff --git a/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java
new file mode 100644
index 0000000000..476d710941
--- /dev/null
+++ b/proto-google-cloud-bigquerystorage-v1beta2/src/main/java/com/google/cloud/bigquery/storage/v1beta2/TableName.java
@@ -0,0 +1,204 @@
+/*
+ * Copyright 2020 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.storage.v1beta2;
+
+import com.google.api.pathtemplate.PathTemplate;
+import com.google.api.resourcenames.ResourceName;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/** AUTO-GENERATED DOCUMENTATION AND CLASS */
+@javax.annotation.Generated("by GAPIC protoc plugin")
+public class TableName implements ResourceName {
+
+ private static final PathTemplate PATH_TEMPLATE =
+ PathTemplate.createWithoutUrlEncoding("projects/{project}/datasets/{dataset}/tables/{table}");
+
+ private volatile Map fieldValuesMap;
+
+ private final String project;
+ private final String dataset;
+ private final String table;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ private TableName(Builder builder) {
+ project = Preconditions.checkNotNull(builder.getProject());
+ dataset = Preconditions.checkNotNull(builder.getDataset());
+ table = Preconditions.checkNotNull(builder.getTable());
+ }
+
+ public static TableName of(String project, String dataset, String table) {
+ return newBuilder().setProject(project).setDataset(dataset).setTable(table).build();
+ }
+
+ public static String format(String project, String dataset, String table) {
+ return newBuilder().setProject(project).setDataset(dataset).setTable(table).build().toString();
+ }
+
+ public static TableName parse(String formattedString) {
+ if (formattedString.isEmpty()) {
+ return null;
+ }
+ Map matchMap =
+ PATH_TEMPLATE.validatedMatch(
+ formattedString, "TableName.parse: formattedString not in valid format");
+ return of(matchMap.get("project"), matchMap.get("dataset"), matchMap.get("table"));
+ }
+
+ public static List parseList(List formattedStrings) {
+ List list = new ArrayList<>(formattedStrings.size());
+ for (String formattedString : formattedStrings) {
+ list.add(parse(formattedString));
+ }
+ return list;
+ }
+
+ public static List toStringList(List values) {
+ List list = new ArrayList(values.size());
+ for (TableName value : values) {
+ if (value == null) {
+ list.add("");
+ } else {
+ list.add(value.toString());
+ }
+ }
+ return list;
+ }
+
+ public static boolean isParsableFrom(String formattedString) {
+ return PATH_TEMPLATE.matches(formattedString);
+ }
+
+ public Map getFieldValuesMap() {
+ if (fieldValuesMap == null) {
+ synchronized (this) {
+ if (fieldValuesMap == null) {
+ ImmutableMap.Builder fieldMapBuilder = ImmutableMap.builder();
+ fieldMapBuilder.put("project", project);
+ fieldMapBuilder.put("dataset", dataset);
+ fieldMapBuilder.put("table", table);
+ fieldValuesMap = fieldMapBuilder.build();
+ }
+ }
+ }
+ return fieldValuesMap;
+ }
+
+ public String getFieldValue(String fieldName) {
+ return getFieldValuesMap().get(fieldName);
+ }
+
+ @Override
+ public String toString() {
+ return PATH_TEMPLATE.instantiate("project", project, "dataset", dataset, "table", table);
+ }
+
+ /** Builder for TableName. */
+ public static class Builder {
+
+ private String project;
+ private String dataset;
+ private String table;
+
+ public String getProject() {
+ return project;
+ }
+
+ public String getDataset() {
+ return dataset;
+ }
+
+ public String getTable() {
+ return table;
+ }
+
+ public Builder setProject(String project) {
+ this.project = project;
+ return this;
+ }
+
+ public Builder setDataset(String dataset) {
+ this.dataset = dataset;
+ return this;
+ }
+
+ public Builder setTable(String table) {
+ this.table = table;
+ return this;
+ }
+
+ private Builder() {}
+
+ private Builder(TableName tableName) {
+ project = tableName.project;
+ dataset = tableName.dataset;
+ table = tableName.table;
+ }
+
+ public TableName build() {
+ return new TableName(this);
+ }
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (o instanceof TableName) {
+ TableName that = (TableName) o;
+ return (this.project.equals(that.project))
+ && (this.dataset.equals(that.dataset))
+ && (this.table.equals(that.table));
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ int h = 1;
+ h *= 1000003;
+ h ^= project.hashCode();
+ h *= 1000003;
+ h ^= dataset.hashCode();
+ h *= 1000003;
+ h ^= table.hashCode();
+ return h;
+ }
+}
diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml
index 5251248ec1..70c249d247 100644
--- a/samples/install-without-bom/pom.xml
+++ b/samples/install-without-bom/pom.xml
@@ -29,7 +29,7 @@
com.google.cloud
google-cloud-bigquerystorage
- 0.131.0-beta
+ 0.131.1-beta
diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml
index aae487f47a..d55fb887f3 100644
--- a/samples/snapshot/pom.xml
+++ b/samples/snapshot/pom.xml
@@ -28,7 +28,7 @@
com.google.cloud
google-cloud-bigquerystorage
- 0.131.1-beta
+ 0.131.2-beta
diff --git a/synth.metadata b/synth.metadata
index 3bc8037095..0758ba5c4f 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -10,49 +10,47 @@
{
"git": {
"name": ".",
- "remote": "git@github.com:stephaniewang526/java-bigquerystorage.git",
- "sha": "c6d9d9ac2c2ef5a901525c9adda64082d826e67e"
+ "remote": "https://github.com/googleapis/java-bigquerystorage.git",
+ "sha": "252440a84d45d9c13e468e7b59fe4702499143a9"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "aaff764c185e18a6c73227357c3df5fa60fec85a",
- "internalRef": "309426927"
+ "sha": "2fc2caaacb15949c7f80426bfc7dafdd41dbc333",
+ "internalRef": "310239576"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "aaff764c185e18a6c73227357c3df5fa60fec85a",
- "internalRef": "309426927"
+ "sha": "2fc2caaacb15949c7f80426bfc7dafdd41dbc333",
+ "internalRef": "310239576"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "aaff764c185e18a6c73227357c3df5fa60fec85a",
- "internalRef": "309426927"
+ "sha": "2fc2caaacb15949c7f80426bfc7dafdd41dbc333",
+ "internalRef": "310239576"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "aaff764c185e18a6c73227357c3df5fa60fec85a",
- "internalRef": "309426927",
- "log": "aaff764c185e18a6c73227357c3df5fa60fec85a\nchore: enable gapicv2 and annotate protos for recommendationengine/v1beta1 API\n\nCommitter: @miraleung\nPiperOrigin-RevId: 309426927\n\nf78da0d54c78aa8f66d52a448db1f7ec0e0fd591\nfix: replace all retry configs removed during GAPIC config migration\n\nPiperOrigin-RevId: 309415646\n\n7e1c7603c8cb2f05ef29842405654ff8a9d3d108\nAdd an OAuth scope annotation in build_service proto file\n\nPiperOrigin-RevId: 309058279\n\n3f5f8a2258c6a41f9fbf7b80acbca631dda0a952\nfeat: added support for project id in scope\nfeat: added support for order_by in SearchAllResources rpc\nfeat: added support for search on location, labels and networkTags fields\ndocs: updated according to above features\n\nPiperOrigin-RevId: 308922843\n\n756b174de4a122461993c1c583345533d819936d\nfix: Add missing method_signature annotations for BigTable Admin Backup RPCs\n\nThe added method_signatures reflect method flattenings in the GAPIC v1 config.\n\nPiperOrigin-RevId: 308824110\n\nc284e4f849e4b085a297d336cef8721ab69ba013\nConfigure retry for Diaglogflow v2.Environments.ListEnvironments and v2beta1.Environments.ListEnvironments.\n\nPiperOrigin-RevId: 308749074\n\nff17eefa184f6985e5d9a8ee3e725da95a06cd23\nchore: add Python target for orgpolicy BUILD.bazel\n\nPiperOrigin-RevId: 308747658\n\n371206424e6db4df2cb3734bb1013dcaac9dbe4b\nRemove raw html from proto comments as this breaks client library documentation generators.\n\nSee https://aip.dev/192\n\n> Any formatting in comments must be in CommonMark. 
Headings and tables must not be used, as these cause problems for several tools, and are unsuitable for client library reference documentation.\n>\n> Comments should use code font for property names and for literals (such as true).\n>\n> Raw HTML must not be used.\n\nPiperOrigin-RevId: 308716969\n\nc2bfceedfc9f01356229bd206ad25e250aa9551c\nGenerating BUILD.bazel for maps/routes/v1alpha.\n\nPiperOrigin-RevId: 308691010\n\n5dca7f1079a4c127245c0f1dbb8293b434fbd0c8\nSupport purchasing capacity commitments in the BigQuery Reservation v1 API.\n\nPiperOrigin-RevId: 308671886\n\n"
+ "sha": "2fc2caaacb15949c7f80426bfc7dafdd41dbc333",
+ "internalRef": "310239576"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "cdddf139b36000b3a7c65fd2a7781e253262359a",
- "log": "cdddf139b36000b3a7c65fd2a7781e253262359a\nfeat: implement multiple version for nodejs lib (#504)\n\n* feat: synthool generate src/index for nodejs client library\n\n* fix the linters\n\n* merge upstream\n\n* remove '-' in jinja tempalte\n\n* rename the functions\n\n* add try, finally to avoid change direcory\n\n* optional for versions and default verison\n958a803ee0c43523fa3e6e0bb016276a69614c8e\nfix: update requirements.txt (#516)\n\n* fix: remove version update check\n\nbecause autosynth now directly calls the synthtool code in this repo.\n\n* fix: add 'deprecation' dot requirements.txt\n\n* fix: add `protobuf` to requirements.txt\n\n* lint\n6b685a261479e27fd959dd7b1ff253d6cf595172\nfix: synthtool path (#515)\n\n\n46fb62a3ebda62cc6c1c98e4f1eaf91965eab580\nfix: kokoro path (#514)\n\n* fix: path to .kokoro-autosynth\n\n* fix: path in ruby, dotnet, and nodejs cnfgs\n7c1a92d00de654db2fb440243129c61393a694e2\nfix: path to build.sh (#513)\n\n\n720b7bff0da03ed9e67cb2362f2d291241fa59ae\nfix: typo (#512)\n\n\n969a2340e74c73227e7c1638ed7650abcac22ee4\nchore: merge autosynth repo into this repo (#508)\n\n* chore: merge autosynth repo into this repo\n\n* fix: add autsynth directory to code coverage reports\n01b6f23d24b27878b48667ce597876d66b59780e\nfix(python): install testutils from pypi (#503)\n\nhttps://pypi.org/project/google-cloud-testutils/\n716f741f2d307b48cbe8a5bc3bc883571212344a\nfix(python): adjust regex for fix_pb2_headers (#500)\n\nFixes regex and updates copyright year to 2020. \n\nBefore:\n```python\n# -*- coding: utf-8 -*-\n\n# Generated by the protocol buffer compiler. 
DO NOT EDIT!\n# source: google/type/timeofday.proto\n```\n\nAfter:\n```python\n# -*- coding: utf-8 -*-\n\n# Copyright 2020 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Generated by the protocol buffer compiler. DO NOT EDIT!\n# source: google/type/timeofday.proto\n```\n21ce80f9d4785dc75aa36d26944bddd5a1b7e25b\nfeat: better Node.js postprocessing (#498)\n\n* feat: better Node.js postprocessing\n\n* docs: docstring for is_gapic_library\n\n* PR feedback\n\n* pass parameter by name\n\nCo-authored-by: Jeffrey Rennie \nCo-authored-by: Benjamin E. Coe \nf5e4c17dc78a966dbf29961dd01f9bbd63e20a04\nfeat: Add include_protos parameter support for GAPICBazel (#489)\n\n* feat: Add include_protos parameter support for GAPICBazel. This is needed to migrate python libraries, which are heavily dependent on this feature.\n\n* add proto_output_path parameter, which is needed for non-standard cases (cloudbuild, containeranalysis, which are under devtools directory)\n\nCo-authored-by: Justin Beckwith \nCo-authored-by: Jeffrey Rennie \n19465d3ec5e5acdb01521d8f3bddd311bcbee28d\nbuild: use codecov's action, now that it's authless (#499)\n\n\n"
+ "sha": "ab883569eb0257bbf16a6d825fd018b3adde3912"
}
}
],
diff --git a/versions.txt b/versions.txt
index dda6e853ad..f8869b69ff 100644
--- a/versions.txt
+++ b/versions.txt
@@ -1,12 +1,12 @@
# Format:
# module:released-version:current-version
-proto-google-cloud-bigquerystorage-v1alpha2:0.96.1:0.96.1
-proto-google-cloud-bigquerystorage-v1beta1:0.96.1:0.96.1
-proto-google-cloud-bigquerystorage-v1beta2:0.96.1:0.96.1
-proto-google-cloud-bigquerystorage-v1:0.96.1:0.96.1
-grpc-google-cloud-bigquerystorage-v1alpha2:0.96.1:0.96.1
-grpc-google-cloud-bigquerystorage-v1beta1:0.96.1:0.96.1
-grpc-google-cloud-bigquerystorage-v1beta2:0.96.1:0.96.1
-grpc-google-cloud-bigquerystorage-v1:0.96.1:0.96.1
-google-cloud-bigquerystorage:0.131.1-beta:0.131.1-beta
+proto-google-cloud-bigquerystorage-v1alpha2:0.96.2:0.96.2
+proto-google-cloud-bigquerystorage-v1beta1:0.96.2:0.96.2
+proto-google-cloud-bigquerystorage-v1beta2:0.96.2:0.96.2
+proto-google-cloud-bigquerystorage-v1:0.96.2:0.96.2
+grpc-google-cloud-bigquerystorage-v1alpha2:0.96.2:0.96.2
+grpc-google-cloud-bigquerystorage-v1beta1:0.96.2:0.96.2
+grpc-google-cloud-bigquerystorage-v1beta2:0.96.2:0.96.2
+grpc-google-cloud-bigquerystorage-v1:0.96.2:0.96.2
+google-cloud-bigquerystorage:0.131.2-beta:0.131.2-beta