Skip to content

Commit

Permalink
feat: [datastream] Max concurrent backfill tasks (#9414)
Browse files Browse the repository at this point in the history
- [ ] Regenerate this pull request now.

You can now set the maximum number of concurrent backfill tasks for a stream using the Datastream API.

PiperOrigin-RevId: 530067890

Source-Link: https://togithub.com/googleapis/googleapis/commit/b2c290faf99f388efb1e080333cba620f38e5afc

Source-Link: https://togithub.com/googleapis/googleapis-gen/commit/83c5413e535cb6fa710541dc6e169a8a44b8a65d
Copy-Tag: eyJwIjoiamF2YS1kYXRhc3RyZWFtLy5Pd2xCb3QueWFtbCIsImgiOiI4M2M1NDEzZTUzNWNiNmZhNzEwNTQxZGM2ZTE2OWE4YTQ0YjhhNjVkIn0=
  • Loading branch information
gcf-owl-bot[bot] committed May 9, 2023
1 parent 7d5c602 commit 253ad50
Show file tree
Hide file tree
Showing 46 changed files with 1,431 additions and 654 deletions.
4 changes: 2 additions & 2 deletions java-datastream/README.md
Expand Up @@ -20,7 +20,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file:
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>libraries-bom</artifactId>
<version>26.11.0</version>
<version>26.14.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
Expand Down Expand Up @@ -195,7 +195,7 @@ Java is a registered trademark of Oracle and/or its affiliates.
[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/google-cloud-java/java11.html
[stability-image]: https://img.shields.io/badge/stability-stable-green
[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-datastream.svg
[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-datastream/1.13.0
[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-datastream/1.15.0
[authentication]: https://github.com/googleapis/google-cloud-java#authentication
[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes
[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles
Expand Down
Expand Up @@ -2581,6 +2581,7 @@ public final OperationFuture<PrivateConnection, OperationMetadata> createPrivate
* .setPrivateConnectionId("privateConnectionId-1926654532")
* .setPrivateConnection(PrivateConnection.newBuilder().build())
* .setRequestId("requestId693933066")
* .setForce(true)
* .build();
* PrivateConnection response = datastreamClient.createPrivateConnectionAsync(request).get();
* }
Expand Down Expand Up @@ -2613,6 +2614,7 @@ public final OperationFuture<PrivateConnection, OperationMetadata> createPrivate
* .setPrivateConnectionId("privateConnectionId-1926654532")
* .setPrivateConnection(PrivateConnection.newBuilder().build())
* .setRequestId("requestId693933066")
* .setForce(true)
* .build();
* OperationFuture<PrivateConnection, OperationMetadata> future =
* datastreamClient.createPrivateConnectionOperationCallable().futureCall(request);
Expand Down Expand Up @@ -2646,6 +2648,7 @@ public final OperationFuture<PrivateConnection, OperationMetadata> createPrivate
* .setPrivateConnectionId("privateConnectionId-1926654532")
* .setPrivateConnection(PrivateConnection.newBuilder().build())
* .setRequestId("requestId693933066")
* .setForce(true)
* .build();
* ApiFuture<Operation> future =
* datastreamClient.createPrivateConnectionCallable().futureCall(request);
Expand Down
Expand Up @@ -801,6 +801,7 @@ public class HttpJsonDatastreamStub extends DatastreamStub {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreatePrivateConnectionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "force", request.getForce());
serializer.putQueryParam(
fields, "privateConnectionId", request.getPrivateConnectionId());
serializer.putQueryParam(fields, "requestId", request.getRequestId());
Expand Down
Expand Up @@ -18,7 +18,15 @@

package com.google.cloud.datastream.v1;

/** Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig} */
/**
*
*
* <pre>
* BigQuery destination configuration
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig}
*/
public final class BigQueryDestinationConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.BigQueryDestinationConfig)
Expand Down Expand Up @@ -63,12 +71,24 @@ public interface SingleTargetDatasetOrBuilder
com.google.protobuf.MessageOrBuilder {

/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The datasetId.
*/
java.lang.String getDatasetId();
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The bytes for datasetId.
Expand Down Expand Up @@ -130,6 +150,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
@SuppressWarnings("serial")
private volatile java.lang.Object datasetId_ = "";
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The datasetId.
Expand All @@ -147,6 +173,12 @@ public java.lang.String getDatasetId() {
}
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The bytes for datasetId.
Expand Down Expand Up @@ -534,6 +566,12 @@ public Builder mergeFrom(

private java.lang.Object datasetId_ = "";
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The datasetId.
Expand All @@ -550,6 +588,12 @@ public java.lang.String getDatasetId() {
}
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The bytes for datasetId.
Expand All @@ -566,6 +610,12 @@ public com.google.protobuf.ByteString getDatasetIdBytes() {
}
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @param value The datasetId to set.
Expand All @@ -581,6 +631,12 @@ public Builder setDatasetId(java.lang.String value) {
return this;
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return This builder for chaining.
Expand All @@ -592,6 +648,12 @@ public Builder clearDatasetId() {
return this;
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @param value The bytes for datasetId to set.
Expand Down Expand Up @@ -683,6 +745,12 @@ public interface SourceHierarchyDatasetsOrBuilder
com.google.protobuf.MessageOrBuilder {

/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -691,6 +759,12 @@ public interface SourceHierarchyDatasetsOrBuilder
*/
boolean hasDatasetTemplate();
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -700,6 +774,12 @@ public interface SourceHierarchyDatasetsOrBuilder
com.google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate
getDatasetTemplate();
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand Down Expand Up @@ -1985,6 +2065,12 @@ public com.google.protobuf.Parser<DatasetTemplate> getParserForType() {
.DatasetTemplate
datasetTemplate_;
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -1996,6 +2082,12 @@ public boolean hasDatasetTemplate() {
return datasetTemplate_ != null;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2012,6 +2104,12 @@ public boolean hasDatasetTemplate() {
: datasetTemplate_;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand Down Expand Up @@ -2421,6 +2519,12 @@ public Builder mergeFrom(
.DatasetTemplateOrBuilder>
datasetTemplateBuilder_;
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2431,6 +2535,12 @@ public boolean hasDatasetTemplate() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2450,6 +2560,12 @@ public boolean hasDatasetTemplate() {
}
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2471,6 +2587,12 @@ public Builder setDatasetTemplate(
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2489,6 +2611,12 @@ public Builder setDatasetTemplate(
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2515,6 +2643,12 @@ public Builder mergeDatasetTemplate(
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2530,6 +2664,12 @@ public Builder clearDatasetTemplate() {
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2542,6 +2682,12 @@ public Builder clearDatasetTemplate() {
return getDatasetTemplateFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand All @@ -2559,6 +2705,12 @@ public Builder clearDatasetTemplate() {
}
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
Expand Down Expand Up @@ -3101,7 +3253,15 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build
Builder builder = new Builder(parent);
return builder;
}
/** Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig} */
/**
*
*
* <pre>
* BigQuery destination configuration
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.BigQueryDestinationConfig)
Expand Down

0 comments on commit 253ad50

Please sign in to comment.