Skip to content

Commit

Permalink
avro_options added along with test (#6557) (#12750)
Browse files Browse the repository at this point in the history
* avro_options added along with test

* avro test file changed

* markdown file updated

* Suggested Changes in Spacing and Field Required

Signed-off-by: Modular Magician <magic-modules@google.com>

Signed-off-by: Modular Magician <magic-modules@google.com>
  • Loading branch information
modular-magician committed Oct 10, 2022
1 parent b5b027b commit 8fa339e
Show file tree
Hide file tree
Showing 5 changed files with 126 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .changelog/6557.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added `avro_options` field to `google_bigquery_table` resource
```
48 changes: 48 additions & 0 deletions google/resource_bigquery_table.go
Expand Up @@ -586,6 +586,22 @@ func resourceBigQueryTable() *schema.Resource {
},
},
},
// AvroOptions: [Optional] Additional options if sourceFormat is set to AVRO.
"avro_options": {
Type: schema.TypeList,
Optional: true,
MaxItems: 1,
Description: `Additional options if source_format is set to "AVRO"`,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"use_avro_logical_types": {
Type: schema.TypeBool,
Required: true,
Description: `If sourceFormat is set to "AVRO", indicates whether to interpret logical types as the corresponding BigQuery data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).`,
},
},
},
},

// IgnoreUnknownValues: [Optional] Indicates if BigQuery should
// allow extra values that are not represented in the table schema.
Expand Down Expand Up @@ -1324,6 +1340,9 @@ func expandExternalDataConfiguration(cfg interface{}) (*bigquery.ExternalDataCon
if v, ok := raw["hive_partitioning_options"]; ok {
edc.HivePartitioningOptions = expandHivePartitioningOptions(v)
}
if v, ok := raw["avro_options"]; ok {
edc.AvroOptions = expandAvroOptions(v)
}
if v, ok := raw["ignore_unknown_values"]; ok {
edc.IgnoreUnknownValues = v.(bool)
}
Expand Down Expand Up @@ -1370,6 +1389,10 @@ func flattenExternalDataConfiguration(edc *bigquery.ExternalDataConfiguration) (
result["hive_partitioning_options"] = flattenHivePartitioningOptions(edc.HivePartitioningOptions)
}

if edc.AvroOptions != nil {
result["avro_options"] = flattenAvroOptions(edc.AvroOptions)
}

if edc.IgnoreUnknownValues {
result["ignore_unknown_values"] = edc.IgnoreUnknownValues
}
Expand Down Expand Up @@ -1531,6 +1554,31 @@ func flattenHivePartitioningOptions(opts *bigquery.HivePartitioningOptions) []ma
return []map[string]interface{}{result}
}

// expandAvroOptions converts the Terraform representation of the
// `avro_options` block (a list holding at most one map) into the
// BigQuery API's AvroOptions struct.
//
// It returns nil when the block is absent so that the field is omitted
// from the API request entirely.
func expandAvroOptions(configured interface{}) *bigquery.AvroOptions {
	elements := configured.([]interface{})
	if len(elements) == 0 {
		return nil
	}

	// Guard against a nil element: the Terraform SDK can yield a nil
	// entry for an empty/unknown block, and the previous unchecked type
	// assertion would panic on it.
	raw, ok := elements[0].(map[string]interface{})
	if !ok {
		return nil
	}

	opts := &bigquery.AvroOptions{}

	if v, ok := raw["use_avro_logical_types"]; ok {
		opts.UseAvroLogicalTypes = v.(bool)
	}

	return opts
}

// flattenAvroOptions converts the BigQuery API AvroOptions struct back
// into the single-element list-of-maps form stored in Terraform state.
// The use_avro_logical_types key is only populated when the option is
// enabled, mirroring how the value was expanded.
func flattenAvroOptions(opts *bigquery.AvroOptions) []map[string]interface{} {
	flattened := make(map[string]interface{})

	if opts.UseAvroLogicalTypes {
		flattened["use_avro_logical_types"] = true
	}

	return []map[string]interface{}{flattened}
}

func expandSchema(raw interface{}) (*bigquery.TableSchema, error) {
var fields []*bigquery.TableFieldSchema

Expand Down
64 changes: 64 additions & 0 deletions google/resource_bigquery_table_test.go
Expand Up @@ -599,6 +599,32 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema(t *testing.T) {
})
}

// TestAccBigQueryTable_AvroPartitioning verifies that a BigQuery table
// backed by an external AVRO file with avro_options set can be created
// and then imported with no state diff (deletion_protection excluded).
func TestAccBigQueryTable_AvroPartitioning(t *testing.T) {
	t.Parallel()

	const avroFilePath = "./test-fixtures/bigquerytable/avro-generated.avro"

	bucketName := testBucketName(t)
	datasetID := fmt.Sprintf("tf_test_%s", randString(t, 10))
	tableID := fmt.Sprintf("tf_test_%s", randString(t, 10))

	vcrTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccBigQueryTableAvroPartitioning(bucketName, avroFilePath, datasetID, tableID),
			},
			{
				ResourceName:            "google_bigquery_table.test",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"deletion_protection"},
			},
		},
	})
}

func TestAccBigQueryTable_RangePartitioning(t *testing.T) {
t.Parallel()
resourceName := "google_bigquery_table.test"
Expand Down Expand Up @@ -1591,6 +1617,44 @@ EOH
`, bucketName, datasetID, tableID)
}

// testAccBigQueryTableAvroPartitioning returns a Terraform configuration
// that uploads a local AVRO fixture to a new GCS bucket and defines an
// external BigQuery table over it with avro_options.use_avro_logical_types
// enabled.
//
// The format arguments are interpolated in order: bucket name, local path
// of the AVRO fixture, dataset ID, and table ID.
//
// Fixes over the previous version: canonical HCL spacing (the config had
// `source_uris= [...]`), and `depends_on` now uses a bare resource
// reference — the quoted-string form is deprecated and rejected by modern
// Terraform.
func testAccBigQueryTableAvroPartitioning(bucketName, avroFilePath, datasetID, tableID string) string {
	return fmt.Sprintf(`
resource "google_storage_bucket" "test" {
  name          = "%s"
  location      = "US"
  force_destroy = true
}

resource "google_storage_bucket_object" "test" {
  name   = "key1=20200330/init.avro"
  source = "%s"
  bucket = google_storage_bucket.test.name
}

resource "google_bigquery_dataset" "test" {
  dataset_id = "%s"
}

resource "google_bigquery_table" "test" {
  deletion_protection = false
  table_id            = "%s"
  dataset_id          = google_bigquery_dataset.test.dataset_id
  external_data_configuration {
    source_format = "AVRO"
    autodetect    = true
    source_uris   = ["gs://${google_storage_bucket.test.name}/*"]
    avro_options {
      use_avro_logical_types = true
    }
  }
  depends_on = [google_storage_bucket_object.test]
}
`, bucketName, avroFilePath, datasetID, tableID)
}

func testAccBigQueryTableRangePartitioning(datasetID, tableID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
Expand Down
Binary file not shown.
11 changes: 11 additions & 0 deletions website/docs/r/bigquery_table.html.markdown
Expand Up @@ -170,6 +170,10 @@ in Terraform state, a `terraform destroy` or `terraform apply` that would delete
partitioning on an unsupported format will lead to an error, as will providing
an invalid specification. Structure is [documented below](#nested_hive_partitioning_options).

* `avro_options` (Optional) - Additional options if `source_format` is set to
"AVRO". Structure is [documented below](#nested_avro_options).


* `ignore_unknown_values` (Optional) - Indicates if BigQuery should
allow extra values that are not represented in the table schema.
If true, the extra values are ignored. If false, records with
Expand Down Expand Up @@ -261,6 +265,13 @@ in Terraform state, a `terraform destroy` or `terraform apply` that would delete
can be either of `gs://bucket/path_to_table` or `gs://bucket/path_to_table/`.
Note that when `mode` is set to `CUSTOM`, you must encode the partition key schema within the `source_uri_prefix` by setting `source_uri_prefix` to `gs://bucket/path_to_table/{key1:TYPE1}/{key2:TYPE2}/{key3:TYPE3}`.

<a name="nested_avro_options"></a>The `avro_options` block supports:

* `use_avro_logical_types` (Optional) - If is set to true, indicates whether
to interpret logical types as the corresponding BigQuery data type
(for example, TIMESTAMP), instead of using the raw type (for example, INTEGER).


<a name="nested_time_partitioning"></a>The `time_partitioning` block supports:

* `expiration_ms` - (Optional) Number of milliseconds for which to keep the
Expand Down

0 comments on commit 8fa339e

Please sign in to comment.