Skip to content

Commit

Permalink
Added customizeDiff for a couple of subfields of params field in goog…
Browse files Browse the repository at this point in the history
…le_bigquery_data_transfer_config (#6784) (#13137)

* Added customize diff for params field in google_bigquery_data_transfer_config

* Added test cases for params field for google_bigquery_data_transfer_config

* Added unit tests to cover ForceNew behaviour of params field

* Added handling for customizeDiff in google_bigquery_data_transfer_config

* Added comments for paramsCustomizeDiff function in google_bigquery_data_transfer_config

* Added test case for different data_source_id in resource_bigquery_data_transfer_config

* Updated test case for different data_source_id in resource_bigquery_data_transfer_config

* Updated error message for resource_bigquery_data_transfer_config_test.go

* Updated test case for resource_bigquery_data_transfer_config_test.go

Signed-off-by: Modular Magician <magic-modules@google.com>

Signed-off-by: Modular Magician <magic-modules@google.com>
  • Loading branch information
modular-magician committed Nov 29, 2022
1 parent 50e1f7d commit 538b863
Show file tree
Hide file tree
Showing 3 changed files with 243 additions and 1 deletion.
3 changes: 3 additions & 0 deletions .changelog/6784.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquerydatatransfer: recreate `google_bigquery_data_transfer_config` for Cloud Storage transfers when immutable params `data_path_template` and `destination_table_name_template` are changed
```
37 changes: 36 additions & 1 deletion google/resource_bigquery_data_transfer_config.go
Expand Up @@ -22,6 +22,7 @@ import (
"strings"
"time"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

Expand All @@ -38,6 +39,40 @@ func sensitiveParamCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v
return nil
}

// paramsCustomizeDiffFunc forces recreation of the resource when either of the
// immutable Cloud Storage transfer params "data_path_template" or
// "destination_table_name_template" changes. For the "google_cloud_storage"
// data source the API cannot update these in place, so ForceNew is set on the
// whole "params" map; for every other data source they remain updatable.
func paramsCustomizeDiffFunc(diff TerraformResourceDiff) error {
	// Only Cloud Storage transfers treat these params as immutable.
	if diff.Get("data_source_id").(string) != "google_cloud_storage" {
		return nil
	}

	oldVal, newVal := diff.GetChange("params")
	oldParams := oldVal.(map[string]interface{})
	newParams := newVal.(map[string]interface{})

	for _, field := range []string{"data_path_template", "destination_table_name_template"} {
		// Both sides must be present: a nil side means the field is being
		// added or removed rather than modified, which is not a forced
		// recreation (matches the original nil-guarded comparisons).
		if oldParams[field] == nil || newParams[field] == nil {
			continue
		}
		if oldParams[field].(string) != newParams[field].(string) {
			if err := diff.ForceNew("params"); err != nil {
				// Wrap the SDK error so the root cause is preserved.
				return fmt.Errorf("ForceNew failed for params, old - %v and new - %v: %w", oldParams, newParams, err)
			}
			return nil
		}
	}

	return nil
}

// paramsCustomizeDiff adapts paramsCustomizeDiffFunc to the CustomizeDiff
// signature expected by the terraform-plugin-sdk; the context and provider
// meta arguments are intentionally unused.
func paramsCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v interface{}) error {
	return paramsCustomizeDiffFunc(diff)
}

func resourceBigqueryDataTransferConfig() *schema.Resource {
return &schema.Resource{
Create: resourceBigqueryDataTransferConfigCreate,
Expand All @@ -55,7 +90,7 @@ func resourceBigqueryDataTransferConfig() *schema.Resource {
Delete: schema.DefaultTimeout(20 * time.Minute),
},

CustomizeDiff: sensitiveParamCustomizeDiff,
CustomizeDiff: customdiff.All(sensitiveParamCustomizeDiff, paramsCustomizeDiff),

Schema: map[string]*schema.Schema{
"data_source_id": {
Expand Down
204 changes: 204 additions & 0 deletions google/resource_bigquery_data_transfer_config_test.go
Expand Up @@ -10,6 +10,144 @@ import (
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)

// TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew
// verifies that paramsCustomizeDiffFunc sets ForceNew on "params" only when an
// immutable Cloud Storage param (data_path_template or
// destination_table_name_template) changes under data_source_id
// "google_cloud_storage", and never for other data sources or mutable params.
func TestBigqueryDataTransferConfig_resourceBigqueryDTCParamsCustomDiffFuncForceNew(t *testing.T) {
	t.Parallel()

	cases := map[string]struct {
		before   map[string]interface{}
		after    map[string]interface{}
		forcenew bool
	}{
		"changing_data_path_template": {
			before: map[string]interface{}{
				"data_source_id": "google_cloud_storage",
				"params": map[string]interface{}{
					"data_path_template":              "gs://bq-bucket-temp/*.json",
					"destination_table_name_template": "table-old",
					"file_format":                     "JSON",
					"max_bad_records":                 10,
					"write_disposition":               "APPEND",
				},
			},
			after: map[string]interface{}{
				"data_source_id": "google_cloud_storage",
				"params": map[string]interface{}{
					"data_path_template":              "gs://bq-bucket-temp-new/*.json",
					"destination_table_name_template": "table-old",
					"file_format":                     "JSON",
					"max_bad_records":                 10,
					"write_disposition":               "APPEND",
				},
			},
			forcenew: true,
		},
		"changing_destination_table_name_template": {
			before: map[string]interface{}{
				"data_source_id": "google_cloud_storage",
				"params": map[string]interface{}{
					"data_path_template":              "gs://bq-bucket-temp/*.json",
					"destination_table_name_template": "table-old",
					"file_format":                     "JSON",
					"max_bad_records":                 10,
					"write_disposition":               "APPEND",
				},
			},
			after: map[string]interface{}{
				"data_source_id": "google_cloud_storage",
				"params": map[string]interface{}{
					"data_path_template":              "gs://bq-bucket-temp/*.json",
					"destination_table_name_template": "table-new",
					"file_format":                     "JSON",
					"max_bad_records":                 10,
					"write_disposition":               "APPEND",
				},
			},
			forcenew: true,
		},
		"changing_non_force_new_fields": {
			before: map[string]interface{}{
				"data_source_id": "google_cloud_storage",
				"params": map[string]interface{}{
					"data_path_template":              "gs://bq-bucket-temp/*.json",
					"destination_table_name_template": "table-old",
					"file_format":                     "JSON",
					"max_bad_records":                 10,
					"write_disposition":               "APPEND",
				},
			},
			after: map[string]interface{}{
				"data_source_id": "google_cloud_storage",
				"params": map[string]interface{}{
					"data_path_template":              "gs://bq-bucket-temp/*.json",
					"destination_table_name_template": "table-old",
					"file_format":                     "JSON",
					"max_bad_records":                 1000,
					"write_disposition":               "APPEND",
				},
			},
			forcenew: false,
		},
		"changing_destination_table_name_template_for_different_data_source_id": {
			before: map[string]interface{}{
				"data_source_id": "scheduled_query",
				"params": map[string]interface{}{
					"destination_table_name_template": "table-old",
					"query":                           "SELECT 1 AS a",
					"write_disposition":               "WRITE_APPEND",
				},
			},
			after: map[string]interface{}{
				"data_source_id": "scheduled_query",
				"params": map[string]interface{}{
					"destination_table_name_template": "table-new",
					"query":                           "SELECT 1 AS a",
					"write_disposition":               "WRITE_APPEND",
				},
			},
			forcenew: false,
		},
		"changing_data_path_template_for_different_data_source_id": {
			before: map[string]interface{}{
				"data_source_id": "scheduled_query",
				"params": map[string]interface{}{
					"data_path_template": "gs://bq-bucket/*.json",
					"query":              "SELECT 1 AS a",
					"write_disposition":  "WRITE_APPEND",
				},
			},
			after: map[string]interface{}{
				"data_source_id": "scheduled_query",
				"params": map[string]interface{}{
					"data_path_template": "gs://bq-bucket-new/*.json",
					"query":              "SELECT 1 AS a",
					"write_disposition":  "WRITE_APPEND",
				},
			},
			forcenew: false,
		},
	}

	for tn, tc := range cases {
		tc := tc // capture for the subtest closure (pre-Go-1.22 loop semantics)
		t.Run(tn, func(t *testing.T) {
			d := &ResourceDiffMock{
				Before: map[string]interface{}{
					"params":         tc.before["params"],
					"data_source_id": tc.before["data_source_id"],
				},
				After: map[string]interface{}{
					"params":         tc.after["params"],
					"data_source_id": tc.after["data_source_id"],
				},
			}
			if err := paramsCustomizeDiffFunc(d); err != nil {
				t.Errorf("expected no error, got: %s", err)
			}
			if d.IsForceNew != tc.forcenew {
				t.Errorf("ForceNew mismatch: expected %v, actual %v", tc.forcenew, d.IsForceNew)
			}
		})
	}
}

// The service account TF uses needs the permission granted in the configs
// but it will get deleted by parallel tests, so they need to be run serially.
func TestAccBigqueryDataTransferConfig(t *testing.T) {
Expand All @@ -19,6 +157,7 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) {
"service_account": testAccBigqueryDataTransferConfig_scheduledQuery_with_service_account,
"no_destintation": testAccBigqueryDataTransferConfig_scheduledQuery_no_destination,
"booleanParam": testAccBigqueryDataTransferConfig_copy_booleanParam,
"update_params": testAccBigqueryDataTransferConfig_force_new_update_params,
}

for name, tc := range testCases {
Expand Down Expand Up @@ -168,6 +307,45 @@ func testAccBigqueryDataTransferConfig_copy_booleanParam(t *testing.T) {
})
}

// testAccBigqueryDataTransferConfig_force_new_update_params exercises the
// customizeDiff ForceNew behaviour end to end: updating data_path_template and
// then destination_table_name_template must each recreate the Cloud Storage
// transfer config cleanly. Every apply step is followed by the same
// import-and-verify step.
func testAccBigqueryDataTransferConfig_force_new_update_params(t *testing.T) {
	randomSuffix := randString(t, 10)

	// All three apply steps share an identical import verification;
	// "location" is ignored because the import ID does not round-trip it.
	importStep := resource.TestStep{
		ResourceName:            "google_bigquery_data_transfer_config.update_config",
		ImportState:             true,
		ImportStateVerify:       true,
		ImportStateVerifyIgnore: []string{"location"},
	}

	vcrTest(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
		Steps: []resource.TestStep{
			// Baseline, then change the path, then change the table template.
			{Config: testAccBigqueryDataTransferConfig_update_params_force_new(randomSuffix, "old", "old")},
			importStep,
			{Config: testAccBigqueryDataTransferConfig_update_params_force_new(randomSuffix, "new", "old")},
			importStep,
			{Config: testAccBigqueryDataTransferConfig_update_params_force_new(randomSuffix, "new", "new")},
			importStep,
		},
	})
}

func testAccCheckBigqueryDataTransferConfigDestroyProducer(t *testing.T) func(s *terraform.State) error {
return func(s *terraform.State) error {
for name, rs := range s.RootModule().Resources {
Expand Down Expand Up @@ -369,3 +547,29 @@ resource "google_bigquery_data_transfer_config" "copy_config" {
}
`, random_suffix, random_suffix, random_suffix)
}

// testAccBigqueryDataTransferConfig_update_params_force_new renders a Cloud
// Storage transfer config whose immutable params embed the path and table
// arguments, so changing either one between test steps triggers the ForceNew
// customizeDiff. randomSuffix isolates the dataset/config names per test run.
func testAccBigqueryDataTransferConfig_update_params_force_new(randomSuffix, path, table string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "dataset" {
  dataset_id    = "tf_test_%s"
  friendly_name = "foo"
  description   = "bar"
  location      = "US"
}

resource "google_bigquery_data_transfer_config" "update_config" {
  display_name           = "tf-test-%s"
  data_source_id         = "google_cloud_storage"
  destination_dataset_id = google_bigquery_dataset.dataset.dataset_id
  location               = google_bigquery_dataset.dataset.location

  params = {
    data_path_template              = "gs://bq-bucket-%s-%s/*.json"
    destination_table_name_template = "the-table-%s-%s"
    file_format                     = "JSON"
    max_bad_records                 = 0
    write_disposition               = "APPEND"
  }
}
`, randomSuffix, randomSuffix, randomSuffix, path, randomSuffix, table)
}

0 comments on commit 538b863

Please sign in to comment.