feat: [datastream] Max concurrent backfill tasks #9414

Merged 2 commits on May 9, 2023.

Changes from all commits
java-datastream/README.md (4 changes: 2 additions & 2 deletions)
@@ -20,7 +20,7 @@ If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file:
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>libraries-bom</artifactId>
<version>26.11.0</version>
<version>26.14.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
@@ -195,7 +195,7 @@ Java is a registered trademark of Oracle and/or its affiliates.
[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/google-cloud-java/java11.html
[stability-image]: https://img.shields.io/badge/stability-stable-green
[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-datastream.svg
[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-datastream/1.13.0
[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-datastream/1.15.0
[authentication]: https://github.com/googleapis/google-cloud-java#authentication
[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes
[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles
DatastreamClient.java
@@ -2581,6 +2581,7 @@ public final OperationFuture<PrivateConnection, OperationMetadata> createPrivate
* .setPrivateConnectionId("privateConnectionId-1926654532")
* .setPrivateConnection(PrivateConnection.newBuilder().build())
* .setRequestId("requestId693933066")
* .setForce(true)
* .build();
* PrivateConnection response = datastreamClient.createPrivateConnectionAsync(request).get();
* }
@@ -2613,6 +2614,7 @@ public final OperationFuture<PrivateConnection, OperationMetadata> createPrivate
* .setPrivateConnectionId("privateConnectionId-1926654532")
* .setPrivateConnection(PrivateConnection.newBuilder().build())
* .setRequestId("requestId693933066")
* .setForce(true)
* .build();
* OperationFuture<PrivateConnection, OperationMetadata> future =
* datastreamClient.createPrivateConnectionOperationCallable().futureCall(request);
@@ -2646,6 +2648,7 @@ public final OperationFuture<PrivateConnection, OperationMetadata> createPrivate
* .setPrivateConnectionId("privateConnectionId-1926654532")
* .setPrivateConnection(PrivateConnection.newBuilder().build())
* .setRequestId("requestId693933066")
* .setForce(true)
* .build();
* ApiFuture<Operation> future =
* datastreamClient.createPrivateConnectionCallable().futureCall(request);
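
For orientation, here is a minimal end-to-end sketch of a create call that uses the new force flag. The project, location, and IDs are illustrative assumptions, and the API reference describes force as skipping create-time validations:

import com.google.cloud.datastream.v1.CreatePrivateConnectionRequest;
import com.google.cloud.datastream.v1.DatastreamClient;
import com.google.cloud.datastream.v1.LocationName;
import com.google.cloud.datastream.v1.PrivateConnection;

public class CreatePrivateConnectionWithForce {
  public static void main(String[] args) throws Exception {
    // Uses application-default credentials; try-with-resources closes the client.
    try (DatastreamClient datastreamClient = DatastreamClient.create()) {
      CreatePrivateConnectionRequest request =
          CreatePrivateConnectionRequest.newBuilder()
              .setParent(LocationName.of("my-project", "us-central1").toString())
              .setPrivateConnectionId("my-private-connection")
              .setPrivateConnection(PrivateConnection.newBuilder().build())
              .setForce(true) // new in this release
              .build();
      // The async method returns an OperationFuture; get() waits for the
      // long-running operation to complete.
      PrivateConnection response =
          datastreamClient.createPrivateConnectionAsync(request).get();
      System.out.println("Created: " + response.getName());
    }
  }
}
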
HttpJsonDatastreamStub.java
@@ -801,6 +801,7 @@ public class HttpJsonDatastreamStub extends DatastreamStub {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreatePrivateConnectionRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "force", request.getForce());
serializer.putQueryParam(
fields, "privateConnectionId", request.getPrivateConnectionId());
serializer.putQueryParam(fields, "requestId", request.getRequestId());
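
On the REST (HttpJson) transport, the new field travels as a query parameter next to privateConnectionId and requestId, so the wire request produced by this stub would look roughly like the following (the host and path follow the public Datastream v1 REST surface; all values are illustrative):

POST https://datastream.googleapis.com/v1/projects/my-project/locations/us-central1/privateConnections?privateConnectionId=my-private-connection&requestId=my-request-id&force=true
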
BigQueryDestinationConfig.java
@@ -18,7 +18,15 @@

package com.google.cloud.datastream.v1;

/** Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig} */
/**
*
*
* <pre>
* BigQuery destination configuration
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig}
*/
public final class BigQueryDestinationConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.BigQueryDestinationConfig)
@@ -63,12 +71,24 @@ public interface SingleTargetDatasetOrBuilder
com.google.protobuf.MessageOrBuilder {

/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The datasetId.
*/
java.lang.String getDatasetId();
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The bytes for datasetId.
@@ -130,6 +150,12 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
@SuppressWarnings("serial")
private volatile java.lang.Object datasetId_ = "";
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The datasetId.
@@ -147,6 +173,12 @@ public java.lang.String getDatasetId() {
}
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The bytes for datasetId.
@@ -534,6 +566,12 @@ public Builder mergeFrom(

private java.lang.Object datasetId_ = "";
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The datasetId.
@@ -550,6 +588,12 @@ public java.lang.String getDatasetId() {
}
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return The bytes for datasetId.
@@ -566,6 +610,12 @@ public com.google.protobuf.ByteString getDatasetIdBytes() {
}
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @param value The datasetId to set.
@@ -581,6 +631,12 @@ public Builder setDatasetId(java.lang.String value) {
return this;
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @return This builder for chaining.
@@ -592,6 +648,12 @@ public Builder clearDatasetId() {
return this;
}
/**
*
*
* <pre>
* The dataset ID of the target dataset.
* </pre>
*
* <code>string dataset_id = 1;</code>
*
* @param value The bytes for datasetId to set.
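
The SingleTargetDataset accessors documented above cover the mode in which all streamed data lands in a single BigQuery dataset. As a short sketch using the generated builders (the oneof setter name follows standard protobuf codegen, and the dataset ID is an illustrative assumption):

BigQueryDestinationConfig singleDatasetConfig =
    BigQueryDestinationConfig.newBuilder()
        .setSingleTargetDataset(
            BigQueryDestinationConfig.SingleTargetDataset.newBuilder()
                .setDatasetId("my_dataset") // illustrative; use the real target dataset ID
                .build())
        .build();
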
@@ -683,6 +745,12 @@ public interface SourceHierarchyDatasetsOrBuilder
com.google.protobuf.MessageOrBuilder {

/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -691,6 +759,12 @@ public interface SourceHierarchyDatasetsOrBuilder
*/
boolean hasDatasetTemplate();
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -700,6 +774,12 @@ public interface SourceHierarchyDatasetsOrBuilder
com.google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate
getDatasetTemplate();
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -1985,6 +2065,12 @@ public com.google.protobuf.Parser<DatasetTemplate> getParserForType() {
.DatasetTemplate
datasetTemplate_;
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -1996,6 +2082,12 @@ public boolean hasDatasetTemplate() {
return datasetTemplate_ != null;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2012,6 +2104,12 @@ public boolean hasDatasetTemplate() {
: datasetTemplate_;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2421,6 +2519,12 @@ public Builder mergeFrom(
.DatasetTemplateOrBuilder>
datasetTemplateBuilder_;
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2431,6 +2535,12 @@ public boolean hasDatasetTemplate() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2450,6 +2560,12 @@ public boolean hasDatasetTemplate() {
}
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2471,6 +2587,12 @@ public Builder setDatasetTemplate(
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2489,6 +2611,12 @@ public Builder setDatasetTemplate(
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2515,6 +2643,12 @@ public Builder mergeDatasetTemplate(
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2530,6 +2664,12 @@ public Builder clearDatasetTemplate() {
return this;
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2542,6 +2682,12 @@ public Builder clearDatasetTemplate() {
return getDatasetTemplateFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
@@ -2559,6 +2705,12 @@ public Builder clearDatasetTemplate() {
}
}
/**
*
*
* <pre>
* The dataset template to use for dynamic dataset creation.
* </pre>
*
* <code>
* .google.cloud.datastream.v1.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate dataset_template = 2;
* </code>
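
By contrast, the SourceHierarchyDatasets accessors above configure Datastream to create BigQuery datasets dynamically from the source hierarchy, driven by the DatasetTemplate. A hedged sketch (the location and prefix values are illustrative assumptions):

BigQueryDestinationConfig hierarchyConfig =
    BigQueryDestinationConfig.newBuilder()
        .setSourceHierarchyDatasets(
            BigQueryDestinationConfig.SourceHierarchyDatasets.newBuilder()
                .setDatasetTemplate(
                    BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate
                        .newBuilder()
                        .setLocation("us-central1")        // BigQuery location for created datasets
                        .setDatasetIdPrefix("datastream_") // prefix prepended to generated dataset IDs
                        .build())
                .build())
        .build();
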
@@ -3101,7 +3253,15 @@ protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.Build
Builder builder = new Builder(parent);
return builder;
}
/** Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig} */
/**
*
*
* <pre>
* BigQuery destination configuration
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.BigQueryDestinationConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.BigQueryDestinationConfig)