diff --git a/.changelog/6557.txt b/.changelog/6557.txt
new file mode 100644
index 00000000000..cbc01b426d9
--- /dev/null
+++ b/.changelog/6557.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+bigquery: added `avro_options` field to `google_bigquery_table` resource
+```
diff --git a/google/resource_bigquery_table.go b/google/resource_bigquery_table.go
index 090d2d43bdc..5ff0426cc61 100644
--- a/google/resource_bigquery_table.go
+++ b/google/resource_bigquery_table.go
@@ -586,6 +586,22 @@ func resourceBigQueryTable() *schema.Resource {
},
},
},
+ // AvroOptions: [Optional] Additional options if sourceFormat is set to AVRO.
+ "avro_options": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Description: `Additional options if source_format is set to "AVRO"`,
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "use_avro_logical_types": {
+ Type: schema.TypeBool,
+ Required: true,
+ Description: `If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).`,
+ },
+ },
+ },
+ },
// IgnoreUnknownValues: [Optional] Indicates if BigQuery should
// allow extra values that are not represented in the table schema.
@@ -1324,6 +1340,9 @@ func expandExternalDataConfiguration(cfg interface{}) (*bigquery.ExternalDataCon
if v, ok := raw["hive_partitioning_options"]; ok {
edc.HivePartitioningOptions = expandHivePartitioningOptions(v)
}
+ if v, ok := raw["avro_options"]; ok {
+ edc.AvroOptions = expandAvroOptions(v)
+ }
if v, ok := raw["ignore_unknown_values"]; ok {
edc.IgnoreUnknownValues = v.(bool)
}
@@ -1370,6 +1389,10 @@ func flattenExternalDataConfiguration(edc *bigquery.ExternalDataConfiguration) (
result["hive_partitioning_options"] = flattenHivePartitioningOptions(edc.HivePartitioningOptions)
}
+ if edc.AvroOptions != nil {
+ result["avro_options"] = flattenAvroOptions(edc.AvroOptions)
+ }
+
if edc.IgnoreUnknownValues {
result["ignore_unknown_values"] = edc.IgnoreUnknownValues
}
@@ -1531,6 +1554,31 @@ func flattenHivePartitioningOptions(opts *bigquery.HivePartitioningOptions) []ma
return []map[string]interface{}{result}
}
+func expandAvroOptions(configured interface{}) *bigquery.AvroOptions {
+ if len(configured.([]interface{})) == 0 {
+ return nil
+ }
+
+ raw := configured.([]interface{})[0].(map[string]interface{})
+ opts := &bigquery.AvroOptions{}
+
+ if v, ok := raw["use_avro_logical_types"]; ok {
+ opts.UseAvroLogicalTypes = v.(bool)
+ }
+
+ return opts
+}
+
+func flattenAvroOptions(opts *bigquery.AvroOptions) []map[string]interface{} {
+ result := map[string]interface{}{}
+
+ if opts.UseAvroLogicalTypes {
+ result["use_avro_logical_types"] = opts.UseAvroLogicalTypes
+ }
+
+ return []map[string]interface{}{result}
+}
+
func expandSchema(raw interface{}) (*bigquery.TableSchema, error) {
var fields []*bigquery.TableFieldSchema
diff --git a/google/resource_bigquery_table_test.go b/google/resource_bigquery_table_test.go
index f5c40b050cb..9410427d9d3 100644
--- a/google/resource_bigquery_table_test.go
+++ b/google/resource_bigquery_table_test.go
@@ -599,6 +599,32 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema(t *testing.T) {
})
}
+func TestAccBigQueryTable_AvroPartitioning(t *testing.T) {
+ t.Parallel()
+ bucketName := testBucketName(t)
+ resourceName := "google_bigquery_table.test"
+ datasetID := fmt.Sprintf("tf_test_%s", randString(t, 10))
+ tableID := fmt.Sprintf("tf_test_%s", randString(t, 10))
+ avroFilePath := "./test-fixtures/bigquerytable/avro-generated.avro"
+
+ vcrTest(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t) },
+ Providers: testAccProviders,
+ CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccBigQueryTableAvroPartitioning(bucketName, avroFilePath, datasetID, tableID),
+ },
+ {
+ ResourceName: resourceName,
+ ImportState: true,
+ ImportStateVerify: true,
+ ImportStateVerifyIgnore: []string{"deletion_protection"},
+ },
+ },
+ })
+}
+
func TestAccBigQueryTable_RangePartitioning(t *testing.T) {
t.Parallel()
resourceName := "google_bigquery_table.test"
@@ -1591,6 +1617,44 @@ EOH
`, bucketName, datasetID, tableID)
}
+func testAccBigQueryTableAvroPartitioning(bucketName, avroFilePath, datasetID, tableID string) string {
+ return fmt.Sprintf(`
+resource "google_storage_bucket" "test" {
+ name = "%s"
+ location = "US"
+ force_destroy = true
+}
+
+resource "google_storage_bucket_object" "test" {
+ name = "key1=20200330/init.avro"
+ source = "%s"
+ bucket = google_storage_bucket.test.name
+}
+
+resource "google_bigquery_dataset" "test" {
+ dataset_id = "%s"
+}
+
+resource "google_bigquery_table" "test" {
+ deletion_protection = false
+ table_id = "%s"
+ dataset_id = google_bigquery_dataset.test.dataset_id
+
+ external_data_configuration {
+ source_format = "AVRO"
+ autodetect = true
+    source_uris   = ["gs://${google_storage_bucket.test.name}/*"]
+
+ avro_options {
+ use_avro_logical_types = true
+ }
+
+ }
+ depends_on = ["google_storage_bucket_object.test"]
+}
+`, bucketName, avroFilePath, datasetID, tableID)
+}
+
func testAccBigQueryTableRangePartitioning(datasetID, tableID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
diff --git a/google/test-fixtures/bigquerytable/avro-generated.avro b/google/test-fixtures/bigquerytable/avro-generated.avro
new file mode 100644
index 00000000000..c10ee312e51
Binary files /dev/null and b/google/test-fixtures/bigquerytable/avro-generated.avro differ
diff --git a/website/docs/r/bigquery_table.html.markdown b/website/docs/r/bigquery_table.html.markdown
index c13fc58a4bd..a71beeab998 100644
--- a/website/docs/r/bigquery_table.html.markdown
+++ b/website/docs/r/bigquery_table.html.markdown
@@ -170,6 +170,10 @@ in Terraform state, a `terraform destroy` or `terraform apply` that would delete
partitioning on an unsupported format will lead to an error, as will providing
an invalid specification. Structure is [documented below](#nested_hive_partitioning_options).
+* `avro_options` (Optional) - Additional options if `source_format` is set to
+ "AVRO". Structure is [documented below](#nested_avro_options).
+
+
* `ignore_unknown_values` (Optional) - Indicates if BigQuery should
allow extra values that are not represented in the table schema.
If true, the extra values are ignored. If false, records with
@@ -261,6 +265,13 @@ in Terraform state, a `terraform destroy` or `terraform apply` that would delete
can be either of `gs://bucket/path_to_table` or `gs://bucket/path_to_table/`.
Note that when `mode` is set to `CUSTOM`, you must encode the partition key schema within the `source_uri_prefix` by setting `source_uri_prefix` to `gs://bucket/path_to_table/{key1:TYPE1}/{key2:TYPE2}/{key3:TYPE3}`.
+The `avro_options` block supports:
+
+* `use_avro_logical_types` (Optional) - If set to true, indicates whether
+  to interpret logical types as the corresponding BigQuery data type
+  (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).
+
+
The `time_partitioning` block supports:
* `expiration_ms` - (Optional) Number of milliseconds for which to keep the