Skip to content

Commit

Permalink
Revert "Added customize diff for params field in google_bigquery_data…
Browse files Browse the repository at this point in the history
…_transfer_config (#6678)" (#6737) (#12885)

This reverts commit 59a5d687c347a3b6d33636620c9057d3145e506a.

Signed-off-by: Modular Magician <magic-modules@google.com>

Signed-off-by: Modular Magician <magic-modules@google.com>
  • Loading branch information
modular-magician committed Oct 26, 2022
1 parent 3fafbdf commit d47c4d6
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 93 deletions.
2 changes: 2 additions & 0 deletions .changelog/6737.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
```release-note:none
```
28 changes: 1 addition & 27 deletions google/resource_bigquery_data_transfer_config.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import (
"strings"
"time"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

Expand All @@ -39,31 +38,6 @@ func sensitiveParamCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v
return nil
}

// paramsCustomizeDiff forces recreation of the resource when an immutable
// key inside the "params" map changes between old and new state. The
// BigQuery Data Transfer API cannot update these fields in place, so a
// changed value must be handled with a destroy/create cycle.
func paramsCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v interface{}) error {
	// Avoid shadowing the `new` builtin with the raw GetChange results.
	oldRaw, newRaw := diff.GetChange("params")
	oldParams := oldRaw.(map[string]interface{})
	newParams := newRaw.(map[string]interface{})

	// Keys in "params" that the API treats as immutable. Previously each key
	// had its own copy-pasted branch; a loop keeps the two cases in sync.
	immutableKeys := []string{"data_path_template", "destination_table_name_template"}

	for _, key := range immutableKeys {
		// Only compare when the key is present on both sides; a key that is
		// merely added or removed does not trigger recreation here
		// (preserves the original nil-guarded behavior).
		if oldParams[key] == nil || newParams[key] == nil {
			continue
		}
		if oldParams[key].(string) != newParams[key].(string) {
			if err := diff.ForceNew("params"); err != nil {
				return fmt.Errorf("ForceNew failed for params, old - %v and new - %v", oldParams, newParams)
			}
			return nil
		}
	}

	return nil
}

func resourceBigqueryDataTransferConfig() *schema.Resource {
return &schema.Resource{
Create: resourceBigqueryDataTransferConfigCreate,
Expand All @@ -81,7 +55,7 @@ func resourceBigqueryDataTransferConfig() *schema.Resource {
Delete: schema.DefaultTimeout(20 * time.Minute),
},

CustomizeDiff: customdiff.All(sensitiveParamCustomizeDiff, paramsCustomizeDiff),
CustomizeDiff: sensitiveParamCustomizeDiff,

Schema: map[string]*schema.Schema{
"data_source_id": {
Expand Down
66 changes: 0 additions & 66 deletions google/resource_bigquery_data_transfer_config_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) {
"service_account": testAccBigqueryDataTransferConfig_scheduledQuery_with_service_account,
"no_destintation": testAccBigqueryDataTransferConfig_scheduledQuery_no_destination,
"booleanParam": testAccBigqueryDataTransferConfig_copy_booleanParam,
"update_params": testAccBigqueryDataTransferConfig_force_new_update_params,
}

for name, tc := range testCases {
Expand Down Expand Up @@ -169,45 +168,6 @@ func testAccBigqueryDataTransferConfig_copy_booleanParam(t *testing.T) {
})
}

// testAccBigqueryDataTransferConfig_force_new_update_params verifies that
// changing either immutable param (data_path_template, then
// destination_table_name_template) forces a clean recreate: each apply is
// followed by an import step that must round-trip successfully.
func testAccBigqueryDataTransferConfig_force_new_update_params(t *testing.T) {
	suffix := randString(t, 10)

	// (path, table) pairs: baseline, mutate path, then mutate table.
	paramPairs := [][2]string{
		{"old", "old"},
		{"new", "old"},
		{"new", "new"},
	}

	var steps []resource.TestStep
	for _, pair := range paramPairs {
		steps = append(steps,
			resource.TestStep{
				Config: testAccBigqueryDataTransferConfig_update_params_force_new(suffix, pair[0], pair[1]),
			},
			resource.TestStep{
				ResourceName:            "google_bigquery_data_transfer_config.update_config",
				ImportState:             true,
				ImportStateVerify:       true,
				ImportStateVerifyIgnore: []string{"location"},
			},
		)
	}

	vcrTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
		Steps:        steps,
	})
}

func testAccCheckBigqueryDataTransferConfigDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
Expand Down Expand Up @@ -409,29 +369,3 @@ resource "google_bigquery_data_transfer_config" "copy_config" {
}
`, random_suffix, random_suffix, random_suffix)
}

func testAccBigqueryDataTransferConfig_update_params_force_new(random_suffix, path, table string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "dataset" {
dataset_id = "tf_test_%s"
friendly_name = "foo"
description = "bar"
location = "US"
}
resource "google_bigquery_data_transfer_config" "update_config" {
display_name = "tf-test-%s"
data_source_id = "google_cloud_storage"
destination_dataset_id = google_bigquery_dataset.dataset.dataset_id
location = google_bigquery_dataset.dataset.location
params = {
data_path_template = "gs://bq-bucket-%s-%s/*.json"
destination_table_name_template = "the-table-%s-%s"
file_format = "JSON"
max_bad_records = 0
write_disposition = "APPEND"
}
}
`, random_suffix, random_suffix, random_suffix, path, random_suffix, table)
}

0 comments on commit d47c4d6

Please sign in to comment.