From 538b863f5bf8f499b4598c7ace94118a1aafb3ce Mon Sep 17 00:00:00 2001 From: The Magician Date: Tue, 29 Nov 2022 11:52:44 -0800 Subject: [PATCH] Added customizeDiff for a couple of subfields of params field in google_bigquery_data_transfer_config (#6784) (#13137) * Added customize diff for params field in google_bigquery_data_transfer_config * Added test cases for params field for google_bigquery_data_transfer_config * Added unit tests to cover ForceNew behaviour of params field * Added handling for customizeDiff in google_bigquery_data_transfer_config * Added comments for paramsCustomizeDiff function in google_bigquery_data_transfer_config * Added test case for different data_source_id in resource_bigquery_data_transfer_config * Updated test case for different data_source_id in resource_bigquery_data_transfer_config * Updated error message for resource_bigquery_data_transfer_config_test.go * Updated test case for resource_bigquery_data_transfer_config_test.go Signed-off-by: Modular Magician Signed-off-by: Modular Magician --- .changelog/6784.txt | 3 + .../resource_bigquery_data_transfer_config.go | 37 +++- ...urce_bigquery_data_transfer_config_test.go | 204 ++++++++++++++++++ 3 files changed, 243 insertions(+), 1 deletion(-) create mode 100644 .changelog/6784.txt diff --git a/.changelog/6784.txt b/.changelog/6784.txt new file mode 100644 index 00000000000..f7ba679f667 --- /dev/null +++ b/.changelog/6784.txt @@ -0,0 +1,3 @@ +```release-note:enhancement +bigquerydatatransfer: recreate `google_bigquery_data_transfer_config` for Cloud Storage transfers when immutable params `data_path_template` and `destination_table_name_template` are changed +``` diff --git a/google/resource_bigquery_data_transfer_config.go b/google/resource_bigquery_data_transfer_config.go index 9f51dec6b7c..997542dc55f 100644 --- a/google/resource_bigquery_data_transfer_config.go +++ b/google/resource_bigquery_data_transfer_config.go @@ -22,6 +22,7 @@ import ( "strings" "time" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -38,6 +39,40 @@ func sensitiveParamCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v return nil } +// This customizeDiff is to use ForceNew for params fields data_path_template and +// destination_table_name_template only if the value of "data_source_id" is "google_cloud_storage". +func paramsCustomizeDiffFunc(diff TerraformResourceDiff) error { + old, new := diff.GetChange("params") + dsId := diff.Get("data_source_id").(string) + oldParams := old.(map[string]interface{}) + newParams := new.(map[string]interface{}) + var err error + + if dsId == "google_cloud_storage" { + if oldParams["data_path_template"] != nil && newParams["data_path_template"] != nil && oldParams["data_path_template"].(string) != newParams["data_path_template"].(string) { + err = diff.ForceNew("params") + if err != nil { + return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams) + } + return nil + } + + if oldParams["destination_table_name_template"] != nil && newParams["destination_table_name_template"] != nil && oldParams["destination_table_name_template"].(string) != newParams["destination_table_name_template"].(string) { + err = diff.ForceNew("params") + if err != nil { + return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams) + } + return nil + } + } + + return nil +} + +func paramsCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v interface{}) error { + return paramsCustomizeDiffFunc(diff) +} + func resourceBigqueryDataTransferConfig() *schema.Resource { return &schema.Resource{ Create: resourceBigqueryDataTransferConfigCreate, @@ -55,7 +90,7 @@ func resourceBigqueryDataTransferConfig() *schema.Resource { Delete: schema.DefaultTimeout(20 * time.Minute), }, - CustomizeDiff: sensitiveParamCustomizeDiff, + CustomizeDiff: customdiff.All(sensitiveParamCustomizeDiff, 
paramsCustomizeDiff), Schema: map[string]*schema.Schema{ "data_source_id": { diff --git a/google/resource_bigquery_data_transfer_config_test.go b/google/resource_bigquery_data_transfer_config_test.go index 90feba91248..81424c158e1 100644 --- a/google/resource_bigquery_data_transfer_config_test.go +++ b/google/resource_bigquery_data_transfer_config_test.go @@ -10,6 +10,144 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" ) +func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + before map[string]interface{} + after map[string]interface{} + forcenew bool + }{ + "changing_data_path_template": { + before: map[string]interface{}{ + "data_source_id": "google_cloud_storage", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-temp/*.json", + "destination_table_name_template": "table-old", + "file_format": "JSON", + "max_bad_records": 10, + "write_disposition": "APPEND", + }, + }, + after: map[string]interface{}{ + "data_source_id": "google_cloud_storage", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-temp-new/*.json", + "destination_table_name_template": "table-old", + "file_format": "JSON", + "max_bad_records": 10, + "write_disposition": "APPEND", + }, + }, + forcenew: true, + }, + "changing_destination_table_name_template": { + before: map[string]interface{}{ + "data_source_id": "google_cloud_storage", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-temp/*.json", + "destination_table_name_template": "table-old", + "file_format": "JSON", + "max_bad_records": 10, + "write_disposition": "APPEND", + }, + }, + after: map[string]interface{}{ + "data_source_id": "google_cloud_storage", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-temp/*.json", + "destination_table_name_template": "table-new", + "file_format": "JSON", + "max_bad_records": 10, + 
"write_disposition": "APPEND", + }, + }, + forcenew: true, + }, + "changing_non_force_new_fields": { + before: map[string]interface{}{ + "data_source_id": "google_cloud_storage", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-temp/*.json", + "destination_table_name_template": "table-old", + "file_format": "JSON", + "max_bad_records": 10, + "write_disposition": "APPEND", + }, + }, + after: map[string]interface{}{ + "data_source_id": "google_cloud_storage", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-temp/*.json", + "destination_table_name_template": "table-old", + "file_format": "JSON", + "max_bad_records": 1000, + "write_disposition": "APPEND", + }, + }, + forcenew: false, + }, + "changing_destination_table_name_template_for_different_data_source_id": { + before: map[string]interface{}{ + "data_source_id": "scheduled_query", + "params": map[string]interface{}{ + "destination_table_name_template": "table-old", + "query": "SELECT 1 AS a", + "write_disposition": "WRITE_APPEND", + }, + }, + after: map[string]interface{}{ + "data_source_id": "scheduled_query", + "params": map[string]interface{}{ + "destination_table_name_template": "table-new", + "query": "SELECT 1 AS a", + "write_disposition": "WRITE_APPEND", + }, + }, + forcenew: false, + }, + "changing_data_path_template_for_different_data_source_id": { + before: map[string]interface{}{ + "data_source_id": "scheduled_query", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket/*.json", + "query": "SELECT 1 AS a", + "write_disposition": "WRITE_APPEND", + }, + }, + after: map[string]interface{}{ + "data_source_id": "scheduled_query", + "params": map[string]interface{}{ + "data_path_template": "gs://bq-bucket-new/*.json", + "query": "SELECT 1 AS a", + "write_disposition": "WRITE_APPEND", + }, + }, + forcenew: false, + }, + } + + for tn, tc := range cases { + d := &ResourceDiffMock{ + Before: map[string]interface{}{ + "params": 
tc.before["params"], + "data_source_id": tc.before["data_source_id"], + }, + After: map[string]interface{}{ + "params": tc.after["params"], + "data_source_id": tc.after["data_source_id"], + }, + } + err := paramsCustomizeDiffFunc(d) + if err != nil { + t.Errorf("failed, expected no error but received - %s for the condition %s", err, tn) + } + if d.IsForceNew != tc.forcenew { + t.Errorf("ForceNew not setup correctly for the condition-'%s', expected:%v; actual:%v", tn, tc.forcenew, d.IsForceNew) + } + } +} + // The service account TF uses needs the permission granted in the configs // but it will get deleted by parallel tests, so they need to be run serially. func TestAccBigqueryDataTransferConfig(t *testing.T) { @@ -19,6 +157,7 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) { "service_account": testAccBigqueryDataTransferConfig_scheduledQuery_with_service_account, "no_destintation": testAccBigqueryDataTransferConfig_scheduledQuery_no_destination, "booleanParam": testAccBigqueryDataTransferConfig_copy_booleanParam, + "update_params": testAccBigqueryDataTransferConfig_force_new_update_params, } for name, tc := range testCases { @@ -168,6 +307,45 @@ func testAccBigqueryDataTransferConfig_copy_booleanParam(t *testing.T) { }) } +func testAccBigqueryDataTransferConfig_force_new_update_params(t *testing.T) { + random_suffix := randString(t, 10) + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "old", "old"), + }, + { + ResourceName: "google_bigquery_data_transfer_config.update_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + { + Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "new", "old"), + }, + { + ResourceName: 
"google_bigquery_data_transfer_config.update_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + { + Config: testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, "new", "new"), + }, + { + ResourceName: "google_bigquery_data_transfer_config.update_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + }, + }) +} + func testAccCheckBigqueryDataTransferConfigDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { for name, rs := range s.RootModule().Resources { @@ -369,3 +547,29 @@ resource "google_bigquery_data_transfer_config" "copy_config" { } `, random_suffix, random_suffix, random_suffix) } + +func testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, path, table string) string { + return fmt.Sprintf(` +resource "google_bigquery_dataset" "dataset" { + dataset_id = "tf_test_%s" + friendly_name = "foo" + description = "bar" + location = "US" +} + +resource "google_bigquery_data_transfer_config" "update_config" { + display_name = "tf-test-%s" + data_source_id = "google_cloud_storage" + destination_dataset_id = google_bigquery_dataset.dataset.dataset_id + location = google_bigquery_dataset.dataset.location + + params = { + data_path_template = "gs://bq-bucket-%s-%s/*.json" + destination_table_name_template = "the-table-%s-%s" + file_format = "JSON" + max_bad_records = 0 + write_disposition = "APPEND" + } +} +`, random_suffix, random_suffix, random_suffix, path, random_suffix, table) +}