Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Modified parameters descriptions as per public doc #1480

Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
4734523
Modified parameters descriptions as per public doc
sharan-malyala Apr 8, 2024
fc13f52
Modified parameters for bigtable to vector embeddings template
sharan-malyala Apr 27, 2024
e8b30fd
Modified missing parameters for bigtable to vector embeddings template
sharan-malyala Apr 27, 2024
b82587a
Modified parameters descriptions for gcs to bigtable template
sharan-malyala Apr 27, 2024
26bc39c
Modified parameters descriptions for text to datastore template
sharan-malyala Apr 27, 2024
8492e30
Modified parameters descriptions for google ads to bigquery template
sharan-malyala Apr 27, 2024
584aebb
Modified parameters for PubSub avro to Bigquery template
sharan-malyala Apr 28, 2024
5eb4f7c
Modified parameters descriptions for DataStream to SQL template
sharan-malyala Apr 28, 2024
b5fd4cb
Modified parameters descriptions for PubSub to text template
sharan-malyala Apr 28, 2024
62326bf
pubsub text to java changes
sharan-malyala Apr 28, 2024
c17a793
Modified parameters for Oracle to Bigquery template
sharan-malyala Apr 29, 2024
7a4286a
Modified parameter descriptions for Bigtable change streams to Pub/Su…
sharan-malyala Apr 29, 2024
c3ab422
reverted example properties
sharan-malyala Apr 29, 2024
19257d0
reverted example
sharan-malyala Apr 29, 2024
03f0a42
Apply suggestions from code review
sharan-malyala Apr 30, 2024
57c21d1
reverted the links
sharan-malyala Apr 30, 2024
6c048c9
changed a description
sharan-malyala Apr 30, 2024
d4a8841
pull origin
sharan-malyala Apr 30, 2024
78c92bf
Apply suggestions from code review
sharan-malyala Apr 30, 2024
64443e5
Apply suggestions from code review
sharan-malyala May 1, 2024
1dc745f
modified DayPattern parameter
sharan-malyala May 3, 2024
f7e8acc
Merge branch 'main' into sharantej-dataflowTemplates
sharan-malyala May 3, 2024
b1dc28d
applied suggestions
sharan-malyala May 4, 2024
27fa668
removed optional from a description
sharan-malyala May 6, 2024
d163e07
ran spotless:apply
sharan-malyala May 6, 2024
52181f7
Apply suggestions from code review
sharan-malyala May 7, 2024
d9916c4
Merge branch 'GoogleCloudPlatform:main' into sharantej-dataflowTemplates
sharan-malyala May 7, 2024
623e467
Merge branch 'GoogleCloudPlatform:main' into sharantej-dataflowTemplates
sharan-malyala May 8, 2024
12b84a1
Merge branch 'GoogleCloudPlatform:main' into sharantej-dataflowTemplates
sharan-malyala May 11, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ public interface JavascriptTextTransformerOptions extends PipelineOptions {
optional = true,
description = "JavaScript UDF path in Cloud Storage",
helpText =
"The Cloud Storage URI of the .js file that defines the JavaScript user-defined function (UDF) you want to use. For example, `gs://my-bucket/my-udfs/my_file.js`.")
"The Cloud Storage URI of the .js file that defines the JavaScript user-defined function "
+ "(UDF) to use. For example, `gs://my-bucket/my-udfs/my_file.js`.")
ValueProvider<String> getJavascriptTextTransformGcsPath();

void setJavascriptTextTransformGcsPath(ValueProvider<String> javascriptTextTransformGcsPath);
Expand All @@ -89,8 +90,10 @@ public interface JavascriptTextTransformerOptions extends PipelineOptions {
regexes = {"[a-zA-Z0-9_]+"},
description = "JavaScript UDF name",
helpText =
"The name of the JavaScript user-defined function (UDF) to use. For example, if your JavaScript function code is `myTransform(inJson) { /*...do stuff...*/ }`, then the function name is `myTransform`. For sample JavaScript UDFs, see UDF Examples (https://github.com/GoogleCloudPlatform/DataflowTemplates#udf-examples).",
example = "transform_udf1")
"The name of the JavaScript user-defined function (UDF) to use. For example, if your "
+ "JavaScript function code is `myTransform(inJson) { /*...do stuff...*/ }`, then the function name "
+ "is `myTransform`. For sample JavaScript UDFs, see "
+ "UDF Examples (https://github.com/GoogleCloudPlatform/DataflowTemplates#udf-examples).")
ValueProvider<String> getJavascriptTextTransformFunctionName();

void setJavascriptTextTransformFunctionName(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,8 +165,7 @@ interface ReadOptions extends BigtableCommonOptions {
order = 3,
optional = true,
description = "Source Cloud Bigtable Project ID",
helpText =
"The Bigtable project ID. The default is the project of the Dataflow job.")
helpText = "The Bigtable project ID. The default is the project for the Dataflow job.")
@Default.String("")
String getBigtableReadProjectId();

Expand All @@ -192,8 +191,7 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
order = 1,
optional = true,
description = "Cloud Bigtable change streams metadata instance ID",
helpText =
"The Bigtable change streams metadata instance ID.")
helpText = "The Bigtable change streams metadata instance ID.")
@Default.String("")
String getBigtableChangeStreamMetadataInstanceId();

Expand All @@ -214,7 +212,8 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
order = 3,
regexes = {"[a-z][a-z0-9\\-_]+[a-z0-9]"},
description = "Cloud Bigtable application profile ID",
helpText = "The Bigtable application profile ID. The application profile must use single-cluster routing and allow single-row transactions.")
helpText =
"The Bigtable application profile ID. The application profile must use single-cluster routing and allow single-row transactions.")
@Validation.Required
String getBigtableChangeStreamAppProfile();

Expand All @@ -226,7 +225,7 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
description =
"Bigtable change streams charset name when reading values and column qualifiers",
helpText =
"The Bigtable change streams charset name when reading rowkeys, values, and column qualifiers. This option is used when message encoding is JSON.")
"The Bigtable change streams charset name when reading values and column qualifiers.")
sharan-malyala marked this conversation as resolved.
Show resolved Hide resolved
@Default.String("UTF-8")
String getBigtableChangeStreamCharset();

Expand All @@ -237,8 +236,9 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
optional = true,
description = "The timestamp to read change streams from",
helpText =
"The starting timestamp (https://tools.ietf.org/html/rfc3339), inclusive, to use for reading change streams. "
+ "For example, `2022-05-05T07:59:59Z`. Defaults to the timestamp of the pipeline start time.")
"The starting timestamp (https://tools.ietf.org/html/rfc3339), inclusive, to use "
+ "for reading change streams. For example, `2022-05-05T07:59:59Z`. "
+ "Defaults to the timestamp of the pipeline start time.")
@Default.String("")
String getBigtableChangeStreamStartTimestamp();

Expand All @@ -248,8 +248,7 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
order = 6,
optional = true,
description = "Cloud Bigtable change streams column families to ignore",
helpText =
"A comma-separated list of column family name changes to ignore.")
helpText = "A comma-separated list of column family name changes to ignore.")
@Default.String("")
String getBigtableChangeStreamIgnoreColumnFamilies();

Expand All @@ -270,8 +269,9 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
optional = true,
description = "A unique name of the client pipeline",
helpText =
"A unique name for the client pipeline. Lets you resume processing from the point at which a previously running pipeline stopped. Defaults "
+ "to the automatically generated name. To find the value, see the Dataflow job logs. ")
"A unique name for the client pipeline. Lets you resume processing "
+ "from the point at which a previously running pipeline stopped. "
+ "Defaults to an automatically generated name. See the Dataflow job logs for the value used.")
String getBigtableChangeStreamName();

void setBigtableChangeStreamName(String value);
Expand All @@ -281,10 +281,12 @@ interface ReadChangeStreamOptions extends BigtableCommonOptions.ReadOptions {
optional = true,
description = "Resume streaming with the same change stream name",
helpText =
"When set to true, a new pipeline resumes processing from the point when a previously running pipeline with the same "
+ "`bigtableChangeStreamName` value stopped. If the pipeline with the given `bigtableChangeStreamName` value has never run, a new pipeline doesn't "
+ "start. When set to `false`, a new pipeline starts. If a pipeline with the same `bigtableChangeStreamName` value has already run "
+ "for the given source, a new pipeline doesn't start. Defaults to `false`.")
"When set to `true`, a new pipeline resumes processing from the point at which a previously "
+ "running pipeline with the same `bigtableChangeStreamName` value stopped. If the "
+ "pipeline with the given `bigtableChangeStreamName` value has never run, a new pipeline "
+ "doesn't start. When set to `false`, a new pipeline starts. If a pipeline with the "
+ "same `bigtableChangeStreamName` value has already run for the given source, "
+ "a new pipeline doesn't start. Defaults to `false`.")
@Default.Boolean(false)
Boolean getBigtableChangeStreamResume();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,10 @@ public interface BigQueryStorageApiStreamingOptions extends BigQueryOptions {
optional = true,
description = "Use BigQuery Storage Write API",
helpText =
"If true, the pipeline uses the Storage Write API (https://cloud.google.com/bigquery/docs/write-api). The default value is `false`."
+ " For more information, see Using the Storage Write API (https://beam.apache.org/documentation/io/built-in/google-bigquery/#storage-write-api).")
"If `true`, the pipeline uses the "
+ "BigQuery Storage Write API (https://cloud.google.com/bigquery/docs/write-api). "
+ "The default value is `false`. For more information, "
+ "see Using the Storage Write API (https://beam.apache.org/documentation/io/built-in/google-bigquery/#storage-write-api).")
@Default.Boolean(false)
@Override
Boolean getUseStorageWriteApi();
Expand All @@ -49,8 +51,9 @@ public interface BigQueryStorageApiStreamingOptions extends BigQueryOptions {
optional = true,
description = "Number of streams for BigQuery Storage Write API",
helpText =
"When using the Storage Write API, specifies the number of write streams. If `useStorageWriteApi` is `true` and"
+ " `useStorageWriteApiAtLeastOnce` is `false`, then you must set this parameter.")
"When using the Storage Write API, specifies the number of write streams. "
+ "If `useStorageWriteApi` is `true` and `useStorageWriteApiAtLeastOnce` is `false`, "
+ "then you must set this parameter.")
@Override
@Default.Integer(0)
Integer getNumStorageWriteApiStreams();
Expand All @@ -60,8 +63,9 @@ public interface BigQueryStorageApiStreamingOptions extends BigQueryOptions {
optional = true,
description = "Triggering frequency in seconds for BigQuery Storage Write API",
helpText =
"Optional: When using the Storage Write API, specifies the triggering frequency, in seconds. If"
+ " `useStorageWriteApi` is `true` and `useStorageWriteApiAtLeastOnce` is `false`, then you must set this parameter.")
"When using the Storage Write API, specifies the triggering frequency, "
+ "in seconds. If `useStorageWriteApi` is `true` and `useStorageWriteApiAtLeastOnce` is `false`, "
+ "then you must set this parameter.")
@Override
Integer getStorageWriteApiTriggeringFrequencySec();
}
Original file line number Diff line number Diff line change
Expand Up @@ -54,8 +54,11 @@ public interface JdbcToBigQueryOptions
},
groupName = "Source",
description = "JDBC connection URL string.",
helpText = "The JDBC connection URL string. You can pass in this value as a string that's encrypted with a Cloud KMS key and then Base64-encoded. Remove whitespace characters from the Base64-encoded string.",
example = "jdbc:oracle:thin:@some-host:port:sid")
sharan-malyala marked this conversation as resolved.
Show resolved Hide resolved
helpText =
"The JDBC connection URL string. You can pass in this "
+ "value as a string that's encrypted with a Cloud KMS key and then Base64-encoded. "
+ "Remove whitespace characters from the Base64-encoded string.",
example = "jdbc:mysql://some-host:3306/sampledb")
String getConnectionURL();

void setConnectionURL(String connectionURL);
Expand All @@ -66,7 +69,12 @@ public interface JdbcToBigQueryOptions
regexes = {"^[a-zA-Z0-9_;!*&=@#-:\\/]+$"},
groupName = "Source",
description = "JDBC connection property string.",
helpText = "The properties string to use for the JDBC connection. The format of the string must be `[propertyName=property;]*`.",
helpText =
"The properties string to use for the JDBC connection. The format of the string must "
+ "be `[propertyName=property;]*`. "
+ "For more information, see "
+ "Configuration Properties (https://dev.mysql.com/doc/connector-j/8.1/en/connector-j-reference-configuration-properties.html) "
+ "in the MySQL documentation.",
example = "unicode=true;characterEncoding=UTF-8")
String getConnectionProperties();

Expand All @@ -79,7 +87,8 @@ public interface JdbcToBigQueryOptions
groupName = "Source",
description = "JDBC connection username.",
helpText =
"The username to use for the JDBC connection. You can pass in this value as a string that's encrypted with a Cloud KMS key and then Base64-encoded. Remove whitespace characters from the Base64-encoded string.")
"The username to use for the JDBC connection. You can pass in this value as a string that's encrypted with a Cloud KMS "
+ "key and then Base64-encoded.")
String getUsername();

void setUsername(String username);
Expand All @@ -90,7 +99,8 @@ public interface JdbcToBigQueryOptions
groupName = "Source",
description = "JDBC connection password.",
helpText =
"The password to use for the JDBC connection. You can pass in this value as a string that's encrypted with a Cloud KMS key and then Base64-encoded. Remove whitespace characters from the Base64-encoded string.")
"The password to use for the JDBC connection. You can pass in this value as a string that's encrypted with a Cloud KMS "
+ "key and then Base64-encoded.")
String getPassword();

void setPassword(String password);
Expand All @@ -101,7 +111,9 @@ public interface JdbcToBigQueryOptions
regexes = {"^.+$"},
groupName = "Source",
description = "JDBC source SQL query",
helpText = "Optional when using partitions. The query to run on the source to extract the data. Either query OR both table AND PartitionColumn must be specified.",
helpText =
"The query to run on the source to extract the data. "
+ "Required when not using partitions.",
example = "select * from sampledb.sample_table")
String getQuery();

Expand All @@ -113,7 +125,8 @@ public interface JdbcToBigQueryOptions
order = 8,
groupName = "Target",
description = "BigQuery output table",
helpText = "The BigQuery output table location.",
helpText =
"The BigQuery output table location, in the format <PROJECT_ID>:<DATASET_NAME>.<TABLE_NAME>.",
sharan-malyala marked this conversation as resolved.
Show resolved Hide resolved
example = "<my-project>:<my-dataset>.<my-table>")
String getOutputTable();

Expand Down Expand Up @@ -174,7 +187,8 @@ public interface JdbcToBigQueryOptions
groupName = "Source",
description = "The name of a column of numeric type that will be used for partitioning.",
helpText =
"Required when using partitions. The name of a column to use for partitioning. Only numeric columns are supported.")
"The name of a column to use for partitioning. Only numeric columns are supported. "
+ "Required when using partitions.")
String getPartitionColumn();

void setPartitionColumn(String partitionColumn);
Expand All @@ -185,8 +199,9 @@ public interface JdbcToBigQueryOptions
groupName = "Source",
description = "Name of the table in the external database.",
helpText =
"Required when using partitions. The table to extract the data from. This parameter also accepts a subquery in parentheses.",
example = "(select id, name from Person) as subq")
"The table to extract the data from. This parameter also accepts a subquery in parentheses. "
+ "Required when using partitions.",
example = "Person or (select id, name from Person) as subq")
String getTable();

void setTable(String table);
Expand All @@ -197,7 +212,7 @@ public interface JdbcToBigQueryOptions
groupName = "Source",
description = "The number of partitions.",
helpText =
"The number of partitions to use. If not specified, a conservative number is assumed by the worker.")
"The number of partitions to use. If not specified, the worker uses a conservative number of partitions.")
Integer getNumPartitions();

void setNumPartitions(Integer numPartitions);
Expand Down
You are viewing a condensed version of this merge commit. You can view the full changes here.