From 7a2a52977885ae8ad4ad3fefa77e4b2fd81189d7 Mon Sep 17 00:00:00 2001
From: Modular Magician
Date: Thu, 3 Nov 2022 19:54:23 +0000
Subject: [PATCH] Add resource google_storage_transfer_agent_pool (#6705)

Signed-off-by: Modular Magician
---
 .changelog/6705.txt                           |   3 +
 google/config.go                              |   8 +-
 google/provider.go                            |  16 +-
 google/provider_handwritten_endpoint.go       |  10 -
 .../resource_storage_transfer_agent_pool.go   | 407 ++++++++++++++++++
 ...torage_transfer_agent_pool_sweeper_test.go | 124 ++++++
 ...source_storage_transfer_agent_pool_test.go | 191 ++++++++
 .../storage_transfer_agent_pool.html.markdown | 128 ++++++
 8 files changed, 869 insertions(+), 18 deletions(-)
 create mode 100644 .changelog/6705.txt
 create mode 100644 google/resource_storage_transfer_agent_pool.go
 create mode 100644 google/resource_storage_transfer_agent_pool_sweeper_test.go
 create mode 100644 google/resource_storage_transfer_agent_pool_test.go
 create mode 100644 website/docs/r/storage_transfer_agent_pool.html.markdown

diff --git a/.changelog/6705.txt b/.changelog/6705.txt
new file mode 100644
index 00000000000..6b9ae28fc2d
--- /dev/null
+++ b/.changelog/6705.txt
@@ -0,0 +1,3 @@
+```release-note:new-resource
+`google_storage_transfer_agent_pool`
+```
diff --git a/google/config.go b/google/config.go
index 9f511813c8f..b978f5a3013 100644
--- a/google/config.go
+++ b/google/config.go
@@ -242,6 +242,7 @@ type Config struct {
 	SpannerBasePath string
 	SQLBasePath string
 	StorageBasePath string
+	StorageTransferBasePath string
 	TagsBasePath string
 	TPUBasePath string
 	VertexAIBasePath string
@@ -257,7 +258,6 @@ type Config struct {
 	IAMBasePath string
 	CloudIoTBasePath string
 	ServiceNetworkingBasePath string
-	StorageTransferBasePath string
 	BigtableAdminBasePath string

 	// dcl
@@ -339,6 +339,7 @@ const SourceRepoBasePathKey = "SourceRepo"
 const SpannerBasePathKey = "Spanner"
 const SQLBasePathKey = "SQL"
 const StorageBasePathKey = "Storage"
+const StorageTransferBasePathKey = "StorageTransfer"
 const TagsBasePathKey = "Tags"
 const TPUBasePathKey = "TPU"
 const VertexAIBasePathKey = "VertexAI"
@@ -352,7 +353,6 @@ const IAMBasePathKey = "IAM"
 const IamCredentialsBasePathKey = "IamCredentials"
 const ResourceManagerV3BasePathKey = "ResourceManagerV3"
 const ServiceNetworkingBasePathKey = "ServiceNetworking"
-const StorageTransferBasePathKey = "StorageTransfer"
 const BigtableAdminBasePathKey = "BigtableAdmin"
 const ContainerAwsBasePathKey = "ContainerAws"
 const ContainerAzureBasePathKey = "ContainerAzure"
@@ -430,6 +430,7 @@ var DefaultBasePaths = map[string]string{
 	SpannerBasePathKey:           "https://spanner.googleapis.com/v1/",
 	SQLBasePathKey:               "https://sqladmin.googleapis.com/sql/v1beta4/",
 	StorageBasePathKey:           "https://storage.googleapis.com/storage/v1/",
+	StorageTransferBasePathKey:   "https://storagetransfer.googleapis.com/v1/",
 	TagsBasePathKey:              "https://cloudresourcemanager.googleapis.com/v3/",
 	TPUBasePathKey:               "https://tpu.googleapis.com/v1/",
 	VertexAIBasePathKey:          "https://{{region}}-aiplatform.googleapis.com/v1/",
@@ -443,7 +444,6 @@
 	IamCredentialsBasePathKey:    "https://iamcredentials.googleapis.com/v1/",
 	ResourceManagerV3BasePathKey: "https://cloudresourcemanager.googleapis.com/v3/",
 	ServiceNetworkingBasePathKey: "https://servicenetworking.googleapis.com/v1/",
-	StorageTransferBasePathKey:   "https://storagetransfer.googleapis.com/v1/",
 	BigtableAdminBasePathKey:     "https://bigtableadmin.googleapis.com/v2/",
 	ContainerAwsBasePathKey:      "https://{{location}}-gkemulticloud.googleapis.com/v1/",
 	ContainerAzureBasePathKey:    "https://{{location}}-gkemulticloud.googleapis.com/v1/",
@@ -1283,6 +1283,7 @@ func ConfigureBasePaths(c *Config) {
 	c.SpannerBasePath = DefaultBasePaths[SpannerBasePathKey]
 	c.SQLBasePath = DefaultBasePaths[SQLBasePathKey]
 	c.StorageBasePath = DefaultBasePaths[StorageBasePathKey]
+	c.StorageTransferBasePath = DefaultBasePaths[StorageTransferBasePathKey]
 	c.TagsBasePath = DefaultBasePaths[TagsBasePathKey]
 	c.TPUBasePath = DefaultBasePaths[TPUBasePathKey]
 	c.VertexAIBasePath = DefaultBasePaths[VertexAIBasePathKey]
@@ -1300,6 +1301,5 @@ func ConfigureBasePaths(c *Config) {
 	c.IAMBasePath = DefaultBasePaths[IAMBasePathKey]
 	c.ServiceNetworkingBasePath = DefaultBasePaths[ServiceNetworkingBasePathKey]
 	c.BigQueryBasePath = DefaultBasePaths[BigQueryBasePathKey]
-	c.StorageTransferBasePath = DefaultBasePaths[StorageTransferBasePathKey]
 	c.BigtableAdminBasePath = DefaultBasePaths[BigtableAdminBasePathKey]
 }
diff --git a/google/provider.go b/google/provider.go
index ab7978bb63f..9102558dbb9 100644
--- a/google/provider.go
+++ b/google/provider.go
@@ -717,6 +717,14 @@ func Provider() *schema.Provider {
 					"GOOGLE_STORAGE_CUSTOM_ENDPOINT",
 				}, DefaultBasePaths[StorageBasePathKey]),
 			},
+			"storage_transfer_custom_endpoint": {
+				Type:         schema.TypeString,
+				Optional:     true,
+				ValidateFunc: validateCustomEndpoint,
+				DefaultFunc: schema.MultiEnvDefaultFunc([]string{
+					"GOOGLE_STORAGE_TRANSFER_CUSTOM_ENDPOINT",
+				}, DefaultBasePaths[StorageTransferBasePathKey]),
+			},
 			"tags_custom_endpoint": {
 				Type:         schema.TypeString,
 				Optional:     true,
 				ValidateFunc: validateCustomEndpoint,
 				DefaultFunc: schema.MultiEnvDefaultFunc([]string{
@@ -768,7 +776,6 @@ func Provider() *schema.Provider {
 		IAMCustomEndpointEntryKey:               IAMCustomEndpointEntry,
 		ServiceNetworkingCustomEndpointEntryKey: ServiceNetworkingCustomEndpointEntry,
 		ServiceUsageCustomEndpointEntryKey:      ServiceUsageCustomEndpointEntry,
-		StorageTransferCustomEndpointEntryKey:   StorageTransferCustomEndpointEntry,
 		BigtableAdminCustomEndpointEntryKey:     BigtableAdminCustomEndpointEntry,

 		// dcl
@@ -910,9 +917,9 @@ func Provider() *schema.Provider {
 	return provider
 }

-// Generated resources: 244
+// Generated resources: 245
 // Generated IAM resources: 150
-// Total generated resources: 394
+// Total generated resources: 395
 func ResourceMap() map[string]*schema.Resource {
 	resourceMap, _ := ResourceMapWithErrors()
 	return resourceMap
@@ -1286,6 +1293,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) {
 			"google_storage_object_access_control":         resourceStorageObjectAccessControl(),
 			"google_storage_default_object_access_control": resourceStorageDefaultObjectAccessControl(),
 			"google_storage_hmac_key":                      resourceStorageHmacKey(),
+			"google_storage_transfer_agent_pool":           resourceStorageTransferAgentPool(),
 			"google_tags_tag_key":                          resourceTagsTagKey(),
 			"google_tags_tag_key_iam_binding":              ResourceIamBinding(TagsTagKeyIamSchema, TagsTagKeyIamUpdaterProducer, TagsTagKeyIdParseFunc),
 			"google_tags_tag_key_iam_member":               ResourceIamMember(TagsTagKeyIamSchema, TagsTagKeyIamUpdaterProducer, TagsTagKeyIdParseFunc),
@@ -1597,6 +1605,7 @@ func providerConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr
 	config.SpannerBasePath = d.Get("spanner_custom_endpoint").(string)
 	config.SQLBasePath = d.Get("sql_custom_endpoint").(string)
 	config.StorageBasePath = d.Get("storage_custom_endpoint").(string)
+	config.StorageTransferBasePath = d.Get("storage_transfer_custom_endpoint").(string)
 	config.TagsBasePath = d.Get("tags_custom_endpoint").(string)
 	config.TPUBasePath = d.Get("tpu_custom_endpoint").(string)
 	config.VertexAIBasePath = d.Get("vertex_ai_custom_endpoint").(string)
@@ -1613,7 +1622,6 @@ func providerConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr
 	config.IAMBasePath = d.Get(IAMCustomEndpointEntryKey).(string)
 	config.ServiceNetworkingBasePath = d.Get(ServiceNetworkingCustomEndpointEntryKey).(string)
 	config.ServiceUsageBasePath = d.Get(ServiceUsageCustomEndpointEntryKey).(string)
-	config.StorageTransferBasePath = d.Get(StorageTransferCustomEndpointEntryKey).(string)
 	config.BigtableAdminBasePath = d.Get(BigtableAdminCustomEndpointEntryKey).(string)

 	// dcl
diff --git a/google/provider_handwritten_endpoint.go b/google/provider_handwritten_endpoint.go
index 5a3a4ea9bb8..e8af9921bb8 100644
--- a/google/provider_handwritten_endpoint.go
+++ b/google/provider_handwritten_endpoint.go
@@ -98,16 +98,6 @@ var ServiceUsageCustomEndpointEntry = &schema.Schema{
 	}, DefaultBasePaths[ServiceUsageBasePathKey]),
 }

-var StorageTransferCustomEndpointEntryKey = "storage_transfer_custom_endpoint"
-var StorageTransferCustomEndpointEntry = &schema.Schema{
-	Type:         schema.TypeString,
-	Optional:     true,
-	ValidateFunc: validateCustomEndpoint,
-	DefaultFunc: schema.MultiEnvDefaultFunc([]string{
-		"GOOGLE_STORAGE_TRANSFER_CUSTOM_ENDPOINT",
-	}, DefaultBasePaths[StorageTransferBasePathKey]),
-}
-
 var BigtableAdminCustomEndpointEntryKey = "bigtable_custom_endpoint"
 var BigtableAdminCustomEndpointEntry = &schema.Schema{
 	Type: schema.TypeString,
diff --git a/google/resource_storage_transfer_agent_pool.go b/google/resource_storage_transfer_agent_pool.go
new file mode 100644
index 00000000000..dbb15325112
--- /dev/null
+++ b/google/resource_storage_transfer_agent_pool.go
@@ -0,0 +1,407 @@
+// ----------------------------------------------------------------------------
+//
+// *** AUTO GENERATED CODE *** Type: MMv1 ***
+//
+// ----------------------------------------------------------------------------
+//
+// This file is automatically generated by Magic Modules and manual
+// changes will be clobbered when the file is regenerated.
+//
+// Please read more about how to change this file in
+// .github/CONTRIBUTING.md.
+//
+// ----------------------------------------------------------------------------
+
+package google
+
+import (
+	"fmt"
+	"log"
+	"reflect"
+	"strings"
+	"time"
+
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+// waitForAgentPoolReady waits for an agent pool to leave the
+// "CREATING" state and become "CREATED", to indicate that it's ready.
+func waitForAgentPoolReady(d *schema.ResourceData, config *Config, timeout time.Duration) error { + return resource.Retry(timeout, func() *resource.RetryError { + if err := resourceStorageTransferAgentPoolRead(d, config); err != nil { + return resource.NonRetryableError(err) + } + + name := d.Get("name").(string) + state := d.Get("state").(string) + if state == "CREATING" { + return resource.RetryableError(fmt.Errorf("AgentPool %q has state %q.", name, state)) + } else if state == "CREATED" { + log.Printf("[DEBUG] AgentPool %q has state %q.", name, state) + return nil + } else { + return resource.NonRetryableError(fmt.Errorf("AgentPool %q has state %q.", name, state)) + } + }) +} + +func resourceStorageTransferAgentPool() *schema.Resource { + return &schema.Resource{ + Create: resourceStorageTransferAgentPoolCreate, + Read: resourceStorageTransferAgentPoolRead, + Update: resourceStorageTransferAgentPoolUpdate, + Delete: resourceStorageTransferAgentPoolDelete, + + Importer: &schema.ResourceImporter{ + State: resourceStorageTransferAgentPoolImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Update: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The ID of the agent pool to create. + +The agentPoolId must meet the following requirements: +* Length of 128 characters or less. +* Not start with the string goog. +* Start with a lowercase ASCII character, followed by: + * Zero or more: lowercase Latin alphabet characters, numerals, hyphens (-), periods (.), underscores (_), or tildes (~). + * One or more numerals or lowercase ASCII characters. + +As expressed by the regular expression: ^(?!goog)[a-z]([a-z0-9-._~]*[a-z0-9])?$.`, + }, + "bandwidth_limit": { + Type: schema.TypeList, + Optional: true, + Description: `Specifies the bandwidth limit details. 
If this field is unspecified, the default value is set as 'No Limit'.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "limit_mbps": { + Type: schema.TypeString, + Required: true, + Description: `Bandwidth rate in megabytes per second, distributed across all the agents in the pool.`, + }, + }, + }, + }, + "display_name": { + Type: schema.TypeString, + Optional: true, + Description: `Specifies the client-specified AgentPool description.`, + }, + "state": { + Type: schema.TypeString, + Computed: true, + Description: `Specifies the state of the AgentPool.`, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func resourceStorageTransferAgentPoolCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + displayNameProp, err := expandStorageTransferAgentPoolDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + bandwidthLimitProp, err := expandStorageTransferAgentPoolBandwidthLimit(d.Get("bandwidth_limit"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("bandwidth_limit"); !isEmptyValue(reflect.ValueOf(bandwidthLimitProp)) && (ok || !reflect.DeepEqual(v, bandwidthLimitProp)) { + obj["bandwidthLimit"] = bandwidthLimitProp + } + + url, err := replaceVars(d, config, "{{StorageTransferBasePath}}projects/{{project}}/agentPools?agentPoolId={{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new AgentPool: %#v", obj) + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for AgentPool: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "POST", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutCreate)) + if err != nil { + return fmt.Errorf("Error creating AgentPool: %s", err) + } + + // Store the ID now + id, err := replaceVars(d, config, "projects/{{project}}/agentPools/{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + if err := waitForAgentPoolReady(d, config, d.Timeout(schema.TimeoutCreate)-time.Minute); err != nil { + return fmt.Errorf("Error waiting for AgentPool %q to be CREATED during creation: %q", d.Get("name").(string), err) + } + + log.Printf("[DEBUG] Finished creating AgentPool %q: %#v", d.Id(), res) + + return resourceStorageTransferAgentPoolRead(d, meta) +} + +func resourceStorageTransferAgentPoolRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + url, err := replaceVars(d, config, "{{StorageTransferBasePath}}projects/{{project}}/agentPools/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for AgentPool: %s", err) + } + billingProject = project + + // err 
== nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequest(config, "GET", billingProject, url, userAgent, nil) + if err != nil { + return handleNotFoundError(err, d, fmt.Sprintf("StorageTransferAgentPool %q", d.Id())) + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading AgentPool: %s", err) + } + + if err := d.Set("display_name", flattenStorageTransferAgentPoolDisplayName(res["displayName"], d, config)); err != nil { + return fmt.Errorf("Error reading AgentPool: %s", err) + } + if err := d.Set("state", flattenStorageTransferAgentPoolState(res["state"], d, config)); err != nil { + return fmt.Errorf("Error reading AgentPool: %s", err) + } + if err := d.Set("bandwidth_limit", flattenStorageTransferAgentPoolBandwidthLimit(res["bandwidthLimit"], d, config)); err != nil { + return fmt.Errorf("Error reading AgentPool: %s", err) + } + + return nil +} + +func resourceStorageTransferAgentPoolUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for AgentPool: %s", err) + } + billingProject = project + + obj := make(map[string]interface{}) + displayNameProp, err := expandStorageTransferAgentPoolDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + bandwidthLimitProp, err := expandStorageTransferAgentPoolBandwidthLimit(d.Get("bandwidth_limit"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("bandwidth_limit"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, bandwidthLimitProp)) { + obj["bandwidthLimit"] = bandwidthLimitProp + } + + url, err := replaceVars(d, config, "{{StorageTransferBasePath}}projects/{{project}}/agentPools/{{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating AgentPool %q: %#v", d.Id(), obj) + updateMask := []string{} + + if d.HasChange("display_name") { + updateMask = append(updateMask, "displayName") + } + + if d.HasChange("bandwidth_limit") { + updateMask = append(updateMask, "bandwidthLimit") + } + // updateMask is a URL parameter but not present in the schema, so replaceVars + // won't set it + url, err = addQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + return err + } + if err := waitForAgentPoolReady(d, config, d.Timeout(schema.TimeoutCreate)-time.Minute); err != nil { + return fmt.Errorf("Error waiting for AgentPool %q to be CREATED before updating: %q", d.Get("name").(string), err) + } + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "PATCH", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return fmt.Errorf("Error updating AgentPool %q: %s", d.Id(), err) + } else { + log.Printf("[DEBUG] Finished updating AgentPool %q: %#v", d.Id(), res) + } + + return resourceStorageTransferAgentPoolRead(d, meta) +} + +func resourceStorageTransferAgentPoolDelete(d 
*schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for AgentPool: %s", err) + } + billingProject = project + + url, err := replaceVars(d, config, "{{StorageTransferBasePath}}projects/{{project}}/agentPools/{{name}}") + if err != nil { + return err + } + + var obj map[string]interface{} + log.Printf("[DEBUG] Deleting AgentPool %q", d.Id()) + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "DELETE", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutDelete)) + if err != nil { + return handleNotFoundError(err, d, "AgentPool") + } + + log.Printf("[DEBUG] Finished deleting AgentPool %q: %#v", d.Id(), res) + return nil +} + +func resourceStorageTransferAgentPoolImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*Config) + if err := parseImportId([]string{ + "projects/(?P[^/]+)/agentPools/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)", + "(?P[^/]+)", + }, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := replaceVars(d, config, "projects/{{project}}/agentPools/{{name}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + if err := waitForAgentPoolReady(d, config, d.Timeout(schema.TimeoutCreate)-time.Minute); err != nil { + return nil, fmt.Errorf("Error waiting for AgentPool %q to be CREATED during importing: %q", d.Get("name").(string), err) + } + + return []*schema.ResourceData{d}, nil +} + +func flattenStorageTransferAgentPoolDisplayName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenStorageTransferAgentPoolState(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenStorageTransferAgentPoolBandwidthLimit(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["limit_mbps"] = + flattenStorageTransferAgentPoolBandwidthLimitLimitMbps(original["limitMbps"], d, config) + return []interface{}{transformed} +} +func flattenStorageTransferAgentPoolBandwidthLimitLimitMbps(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func expandStorageTransferAgentPoolDisplayName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandStorageTransferAgentPoolBandwidthLimit(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedLimitMbps, err := expandStorageTransferAgentPoolBandwidthLimitLimitMbps(original["limit_mbps"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedLimitMbps); val.IsValid() && !isEmptyValue(val) { + transformed["limitMbps"] = transformedLimitMbps + } + + return transformed, nil +} + +func 
expandStorageTransferAgentPoolBandwidthLimitLimitMbps(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} diff --git a/google/resource_storage_transfer_agent_pool_sweeper_test.go b/google/resource_storage_transfer_agent_pool_sweeper_test.go new file mode 100644 index 00000000000..6ddc04c46be --- /dev/null +++ b/google/resource_storage_transfer_agent_pool_sweeper_test.go @@ -0,0 +1,124 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package google + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func init() { + resource.AddTestSweepers("StorageTransferAgentPool", &resource.Sweeper{ + Name: "StorageTransferAgentPool", + F: testSweepStorageTransferAgentPool, + }) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepStorageTransferAgentPool(region string) error { + resourceName := "StorageTransferAgentPool" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := getTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://storagetransfer.googleapis.com/v1/projects/{{project}}/agentPools", "?")[0] + listUrl, err := replaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := sendRequest(config, "GET", config.Project, listUrl, config.userAgent, nil) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["agentPools"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. 
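+	// A pool is treated as sweepable only when its name carries a known test
+	// prefix (for example "tf-test"); anything else is skipped and counted so
+	// the number of skipped items can be reported after the loop.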
+ nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !isSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://storagetransfer.googleapis.com/v1/projects/{{project}}/agentPools/{{name}}" + deleteUrl, err := replaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = sendRequest(config, "DELETE", config.Project, deleteUrl, config.userAgent, nil) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} diff --git a/google/resource_storage_transfer_agent_pool_test.go b/google/resource_storage_transfer_agent_pool_test.go new file mode 100644 index 00000000000..f23d0f6f109 --- /dev/null +++ b/google/resource_storage_transfer_agent_pool_test.go @@ -0,0 +1,191 @@ +package google + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccStorageTransferAgentPool_agentPoolUpdate(t *testing.T) { + t.Parallel() + + agentPoolName := fmt.Sprintf("tf-test-agent-pool-%s", randString(t, 10)) + displayName := fmt.Sprintf("tf-test-display-name-%s", randString(t, 10)) + displayNameUpdate := fmt.Sprintf("tf-test-display-name-%s", randString(t, 10)) + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckStorageTransferAgentPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageTransferAgentPool_agentPoolBasic(getTestProjectFromEnv(), agentPoolName, displayName), + }, + { + ResourceName: "google_storage_transfer_agent_pool.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferAgentPool_agentPoolBasic(getTestProjectFromEnv(), agentPoolName, displayNameUpdate), + }, + { + ResourceName: "google_storage_transfer_agent_pool.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferAgentPool_updateLimitMbps(getTestProjectFromEnv(), agentPoolName, displayNameUpdate), + }, + { + ResourceName: "google_storage_transfer_agent_pool.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferAgentPool_omitDisplayName(getTestProjectFromEnv(), agentPoolName), + }, + { + ResourceName: "google_storage_transfer_agent_pool.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferAgentPool_omitBandwidthLimit(getTestProjectFromEnv(), agentPoolName, displayNameUpdate), + }, + { + ResourceName: "google_storage_transfer_agent_pool.foo", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccStorageTransferAgentPool_agentPoolBasic(project, agentPoolName, displayName string) string { + return fmt.Sprintf(` +data 
"google_storage_transfer_project_service_account" "default" { + project = "%s" +} + +resource "google_project_iam_member" "agent_pool" { + project = "%s" + role = "roles/pubsub.editor" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_transfer_agent_pool" "foo" { + name = "%s" + display_name = "%s" + bandwidth_limit { + limit_mbps = "120" + } + + depends_on = [google_project_iam_member.agent_pool] +} +`, project, project, agentPoolName, displayName) +} + +func testAccStorageTransferAgentPool_updateLimitMbps(project, agentPoolName, displayName string) string { + return fmt.Sprintf(` +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} + +resource "google_project_iam_member" "agent_pool" { + project = "%s" + role = "roles/pubsub.editor" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_transfer_agent_pool" "foo" { + name = "%s" + display_name = "%s" + bandwidth_limit { + limit_mbps = "150" + } + + depends_on = [google_project_iam_member.agent_pool] +} +`, project, project, agentPoolName, displayName) +} + +func testAccStorageTransferAgentPool_omitDisplayName(project string, agentPoolName string) string { + return fmt.Sprintf(` +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} + +resource "google_project_iam_member" "agent_pool" { + project = "%s" + role = "roles/pubsub.editor" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_transfer_agent_pool" "foo" { + name = "%s" + bandwidth_limit { + limit_mbps = "120" + } + + depends_on = [google_project_iam_member.agent_pool] +} +`, project, project, agentPoolName) +} + +func testAccStorageTransferAgentPool_omitBandwidthLimit(project string, agentPoolName string, displayName string) string { + return fmt.Sprintf(` +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} + +resource "google_project_iam_member" "agent_pool" { + project = "%s" + role = "roles/pubsub.editor" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_transfer_agent_pool" "foo" { + name = "%s" + display_name = "%s" + + depends_on = [google_project_iam_member.agent_pool] +} +`, project, project, agentPoolName, displayName) +} + +func testAccCheckStorageTransferAgentPoolDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_transfer_agent_pool" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := googleProviderConfig(t) + + url, err := replaceVarsForTest(config, rs, "{{StorageTransferBasePath}}projects/{{project}}/agentPools/{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = sendRequest(config, "GET", billingProject, url, config.userAgent, nil) + if err == nil { + return fmt.Errorf("StorageTransferAgentPool still exists at %s", url) + } + } + + return nil + } +} diff --git a/website/docs/r/storage_transfer_agent_pool.html.markdown b/website/docs/r/storage_transfer_agent_pool.html.markdown new file mode 100644 index 00000000000..c6114138b39 --- /dev/null +++ 
b/website/docs/r/storage_transfer_agent_pool.html.markdown
@@ -0,0 +1,128 @@
+---
+# ----------------------------------------------------------------------------
+#
+# *** AUTO GENERATED CODE *** Type: MMv1 ***
+#
+# ----------------------------------------------------------------------------
+#
+# This file is automatically generated by Magic Modules and manual
+# changes will be clobbered when the file is regenerated.
+#
+# Please read more about how to change this file in
+# .github/CONTRIBUTING.md.
+#
+# ----------------------------------------------------------------------------
+subcategory: "Storage Transfer Service"
+page_title: "Google: google_storage_transfer_agent_pool"
+description: |-
+  Represents an On-Premises Agent pool.
+---
+
+# google\_storage\_transfer\_agent\_pool
+
+Represents an On-Premises Agent pool.
+
+
+To get more information about AgentPool, see:
+
+* [API documentation](https://cloud.google.com/storage-transfer/docs/reference/rest/v1/projects.agentPools)
+* How-to Guides
+    * [Official Documentation](https://cloud.google.com/storage-transfer/docs/on-prem-agent-pools)
+
+## Example Usage - Agent Pool Basic
+
+
+```hcl
+data "google_storage_transfer_project_service_account" "default" {
+  project = "my-project-name"
+}
+
+resource "google_project_iam_member" "pubsub_editor_role" {
+  project = "my-project-name"
+  role    = "roles/pubsub.editor"
+  member  = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "google_storage_transfer_agent_pool" "example" {
+  name         = "agent-pool-example"
+  display_name = "Source A to destination Z"
+  bandwidth_limit {
+    limit_mbps = "120"
+  }
+
+  depends_on = [google_project_iam_member.pubsub_editor_role]
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+
+* `name` -
+  (Required)
+  The ID of the agent pool to create.
+  The agentPoolId must meet the following requirements:
+  * Length of 128 characters or less.
+  * Not start with the string goog.
+  * Start with a lowercase ASCII character, followed by:
+    * Zero or more: lowercase Latin alphabet characters, numerals, hyphens (-), periods (.), underscores (_), or tildes (~).
+    * One or more numerals or lowercase ASCII characters.
+  As expressed by the regular expression: ^(?!goog)[a-z]([a-z0-9-._~]*[a-z0-9])?$.
+
+
+- - -
+
+
+* `display_name` -
+  (Optional)
+  Specifies the client-specified AgentPool description.
+
+* `bandwidth_limit` -
+  (Optional)
+  Specifies the bandwidth limit details. If this field is unspecified, the default value is set as 'No Limit'.
+  Structure is [documented below](#nested_bandwidth_limit).
+
+* `project` - (Optional) The ID of the project in which the resource belongs.
+  If it is not provided, the provider project is used.
+
+
+<a name="nested_bandwidth_limit"></a>The `bandwidth_limit` block supports:
+
+* `limit_mbps` -
+  (Required)
+  Bandwidth rate in megabytes per second, distributed across all the agents in the pool.
+
+## Attributes Reference
+
+In addition to the arguments listed above, the following computed attributes are exported:
+
+* `id` - an identifier for the resource with format `projects/{{project}}/agentPools/{{name}}`
+
+* `state` -
+  Specifies the state of the AgentPool.
+
+
+## Timeouts
+
+This resource provides the following
+[Timeouts](/docs/configuration/resources.html#timeouts) configuration options:
+
+- `create` - Default is 30 minutes.
+- `update` - Default is 20 minutes.
+- `delete` - Default is 20 minutes.
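+
+These defaults can be overridden for a particular configuration with a
+standard `timeouts` block. A minimal sketch, reusing the pool from the
+example above with an illustrative longer create timeout:
+
+```hcl
+resource "google_storage_transfer_agent_pool" "example" {
+  name = "agent-pool-example"
+
+  timeouts {
+    create = "45m"
+    delete = "10m"
+  }
+}
+```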
+
+## Import
+
+
+AgentPool can be imported using any of these accepted formats:
+
+```
+$ terraform import google_storage_transfer_agent_pool.default projects/{{project}}/agentPools/{{name}}
+$ terraform import google_storage_transfer_agent_pool.default {{project}}/{{name}}
+$ terraform import google_storage_transfer_agent_pool.default {{name}}
+```
+
+## User Project Overrides
+
+This resource supports [User Project Overrides](https://www.terraform.io/docs/providers/google/guides/provider_reference.html#user_project_override).
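+
+With `user_project_override` enabled, the provider sends the
+`X-Goog-User-Project` header so that quota and billing for this resource's
+API calls are attributed to the project selected via `billing_project`.
+A minimal provider block that opts in might look like the following sketch
+(the project ID is a placeholder):
+
+```hcl
+provider "google" {
+  user_project_override = true
+  billing_project       = "my-quota-project"
+}
+```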