From 2a46ec3df419bbeba6a2e60b2b621dc60f31eab1 Mon Sep 17 00:00:00 2001
From: Yiru Tang
Date: Wed, 7 Dec 2022 14:51:50 -0800
Subject: [PATCH] fix: update JsonStreamWriterBuilder comment and update sample
 to use the latest schema retrieval support (#1902)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* fix: update sample to use the latest schema retrieval support

* .

* .

* .

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* .

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot
---
 .../cloud/bigquery/storage/v1/JsonStreamWriter.java | 10 ++++++++++
 .../bigquerystorage/WriteToDefaultStream.java       | 13 +++----------
 2 files changed, 13 insertions(+), 10 deletions(-)

diff --git a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java
index 6380af4fc6..d2c58e372b 100644
--- a/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java
+++ b/google-cloud-bigquerystorage/src/main/java/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.java
@@ -320,6 +320,11 @@ private void setStreamWriterSettings(
    * newBuilder that constructs a JsonStreamWriter builder with BigQuery client being initialized by
    * StreamWriter by default.
    *
+   * <p>The table schema passed in will be updated automatically when there is a schema update
+   * event. When used for Writer creation, it should be the latest schema. So when you are trying to
+   * reuse a stream, you should use Builder newBuilder( String streamOrTableName,
+   * BigQueryWriteClient client) instead, so the created Writer will be based on a fresh schema.
+   *
    * @param streamOrTableName name of the stream that must follow
    *     "projects/[^/]+/datasets/[^/]+/tables/[^/]+/streams/[^/]+" or table name
    *     "projects/[^/]+/datasets/[^/]+/tables/[^/]+"
@@ -336,6 +341,11 @@ public static Builder newBuilder(String streamOrTableName, TableSchema tableSche
   /**
    * newBuilder that constructs a JsonStreamWriter builder.
    *
+   * <p>The table schema passed in will be updated automatically when there is a schema update
+   * event. When used for Writer creation, it should be the latest schema. So when you are trying to
+   * reuse a stream, you should use Builder newBuilder( String streamOrTableName,
+   * BigQueryWriteClient client) instead, so the created Writer will be based on a fresh schema.
+   *
    * @param streamOrTableName name of the stream that must follow
    *     "projects/[^/]+/datasets/[^/]+/tables/[^/]+/streams/[^/]+"
    * @param tableSchema The schema of the table when the stream was created, which is passed back
diff --git a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
index 1913360b8a..d8f0cc38b5 100644
--- a/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
+++ b/samples/snippets/src/main/java/com/example/bigquerystorage/WriteToDefaultStream.java
@@ -24,16 +24,14 @@
 import com.google.cloud.bigquery.BigQuery;
 import com.google.cloud.bigquery.BigQueryOptions;
 import com.google.cloud.bigquery.QueryJobConfiguration;
-import com.google.cloud.bigquery.Schema;
-import com.google.cloud.bigquery.Table;
 import com.google.cloud.bigquery.TableResult;
 import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
 import com.google.cloud.bigquery.storage.v1.Exceptions;
 import com.google.cloud.bigquery.storage.v1.Exceptions.AppendSerializtionError;
 import com.google.cloud.bigquery.storage.v1.Exceptions.StorageException;
 import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
 import com.google.cloud.bigquery.storage.v1.TableName;
-import com.google.cloud.bigquery.storage.v1.TableSchema;
 import com.google.common.collect.ImmutableList;
 import com.google.common.util.concurrent.MoreExecutors;
 import com.google.protobuf.Descriptors.DescriptorValidationException;
@@ -146,17 +144,12 @@ private static class DataWriter {
 
     public void initialize(TableName parentTable)
         throws DescriptorValidationException, IOException, InterruptedException {
-      // Retrive table schema information.
-      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-      Table table = bigquery.getTable(parentTable.getDataset(), parentTable.getTable());
-      Schema schema = table.getDefinition().getSchema();
-      TableSchema tableSchema = BqToBqStorageSchemaConverter.convertTableSchema(schema);
-
       // Use the JSON stream writer to send records in JSON format. Specify the table name to write
       // to the default stream.
       // For more information about JsonStreamWriter, see:
       // https://googleapis.dev/java/google-cloud-bigquerystorage/latest/com/google/cloud/bigquery/storage/v1/JsonStreamWriter.html
-      streamWriter = JsonStreamWriter.newBuilder(parentTable.toString(), tableSchema).build();
+      streamWriter =
+          JsonStreamWriter.newBuilder(parentTable.toString(), BigQueryWriteClient.create()).build();
     }
 
     public void append(AppendContext appendContext)
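
For orientation, here is a minimal, self-contained sketch of the pattern the updated sample adopts: building the JsonStreamWriter from a BigQueryWriteClient so the writer retrieves the table's current schema itself, instead of passing in a pre-converted TableSchema. This is an illustration of the builder overload the patch documents, not part of the patch; the project, dataset, table, and column names are placeholders, and error handling is trimmed to the essentials.

import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1.TableName;
import org.json.JSONArray;
import org.json.JSONObject;

public class DefaultStreamSketch {

  public static void main(String[] args) throws Exception {
    // Placeholder identifiers; substitute a real project, dataset, and table.
    TableName parentTable = TableName.of("my-project", "my_dataset", "my_table");

    // Passing a BigQueryWriteClient (rather than a TableSchema) lets the writer
    // fetch the table's current schema when it is built, so it starts from a
    // fresh schema even if the table changed since the stream was last used.
    try (BigQueryWriteClient client = BigQueryWriteClient.create();
        JsonStreamWriter writer =
            JsonStreamWriter.newBuilder(parentTable.toString(), client).build()) {

      // Rows are plain JSON objects keyed by column name; "col1" is a placeholder column.
      JSONArray rows = new JSONArray();
      rows.put(new JSONObject().put("col1", "hello world"));

      // Append to the table's default stream and block until the server responds.
      AppendRowsResponse response = writer.append(rows).get();
      System.out.println("Rows appended: " + response.hasAppendResult());
    }
  }
}

The sample's DataWriter keeps the same idea but wraps the append in retry and callback handling; the sketch above only shows the schema-retrieval change itself.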