New resource - azurerm_data_factory_integration_runtime_self_hosted #6535

Merged
@@ -0,0 +1,276 @@
package datafactory

import (
"bytes"
"fmt"
"regexp"
"time"

"github.com/Azure/azure-sdk-for-go/services/datafactory/mgmt/2018-06-01/datafactory"
"github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/validate"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/features"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/parse"
azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts"
"github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils"
)

func resourceArmDataFactoryIntegrationRuntimeSelfHosted() *schema.Resource {
    return &schema.Resource{
        Create: resourceArmDataFactoryIntegrationRuntimeSelfHostedCreateUpdate,
        Read:   resourceArmDataFactoryIntegrationRuntimeSelfHostedRead,
        Update: resourceArmDataFactoryIntegrationRuntimeSelfHostedCreateUpdate,
        Delete: resourceArmDataFactoryIntegrationRuntimeSelfHostedDelete,

        Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error {
            _, err := parse.DataFactoryIntegrationRuntimeID(id)
            return err
        }),

        Timeouts: &schema.ResourceTimeout{
            Create: schema.DefaultTimeout(30 * time.Minute),
            Read:   schema.DefaultTimeout(5 * time.Minute),
            Update: schema.DefaultTimeout(30 * time.Minute),
            Delete: schema.DefaultTimeout(30 * time.Minute),
        },

        Schema: map[string]*schema.Schema{
            "name": {
                Type:     schema.TypeString,
                Required: true,
                ForceNew: true,
                ValidateFunc: validation.StringMatch(
                    regexp.MustCompile(`^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$`),
                    `Invalid name for Self-Hosted Integration Runtime: minimum 3 characters, must start and end with a number or a letter, may only consist of letters, numbers and dashes and no consecutive dashes.`,
                ),
            },

            "data_factory_name": {
                Type:         schema.TypeString,
                Required:     true,
                ForceNew:     true,
                ValidateFunc: validate.DataFactoryName(),
            },

            "resource_group_name": azure.SchemaResourceGroupName(),

            "description": {
                Type:     schema.TypeString,
                Optional: true,
            },

            "rbac_authorization": {
                Type:     schema.TypeSet,
                Optional: true,
                ForceNew: true,
                Elem: &schema.Resource{
                    Schema: map[string]*schema.Schema{
                        "resource_id": {
                            Type:         schema.TypeString,
                            Required:     true,
                            ValidateFunc: validation.StringIsNotEmpty,
                        },
                    },
                },
            },

            "auth_key_1": {
                Type:     schema.TypeString,
                Computed: true,
            },

            "auth_key_2": {
                Type:     schema.TypeString,
                Computed: true,
            },
        },
    }
}

func resourceArmDataFactoryIntegrationRuntimeSelfHostedCreateUpdate(d *schema.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).DataFactory.IntegrationRuntimesClient
    ctx, cancel := timeouts.ForCreateUpdate(meta.(*clients.Client).StopContext, d)
    defer cancel()

    name := d.Get("name").(string)
    factoryName := d.Get("data_factory_name").(string)
    resourceGroup := d.Get("resource_group_name").(string)

    if features.ShouldResourcesBeImported() && d.IsNewResource() {
        existing, err := client.Get(ctx, resourceGroup, factoryName, name, "")
        if err != nil {
            if !utils.ResponseWasNotFound(existing.Response) {
                return fmt.Errorf("Error checking for presence of existing Data Factory Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q): %s", name, resourceGroup, factoryName, err)
            }
        }

        if existing.ID != nil && *existing.ID != "" {
            return tf.ImportAsExistsError("azurerm_data_factory_integration_runtime_self_hosted", *existing.ID)
        }
    }

    description := d.Get("description").(string)

    selfHostedIntegrationRuntime := datafactory.SelfHostedIntegrationRuntime{
        Description: &description,
        Type:        datafactory.TypeSelfHosted,
    }

    properties := expandAzureRmDataFactoryIntegrationRuntimeSelfHostedTypeProperties(d)
    if properties != nil {
        selfHostedIntegrationRuntime.SelfHostedIntegrationRuntimeTypeProperties = properties
    }

    basicIntegrationRuntime, _ := selfHostedIntegrationRuntime.AsBasicIntegrationRuntime()

    integrationRuntime := datafactory.IntegrationRuntimeResource{
        Name:       &name,
        Properties: basicIntegrationRuntime,
    }

    if _, err := client.CreateOrUpdate(ctx, resourceGroup, factoryName, name, integrationRuntime, ""); err != nil {
        return fmt.Errorf("Error creating/updating Data Factory Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q): %+v", name, resourceGroup, factoryName, err)
    }

    resp, err := client.Get(ctx, resourceGroup, factoryName, name, "")
    if err != nil {
        return fmt.Errorf("Error retrieving Data Factory Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q): %+v", name, resourceGroup, factoryName, err)
    }

    if resp.ID == nil {
        return fmt.Errorf("Cannot read Data Factory Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q) ID", name, resourceGroup, factoryName)
    }

    d.SetId(*resp.ID)

    return resourceArmDataFactoryIntegrationRuntimeSelfHostedRead(d, meta)
}

func resourceArmDataFactoryIntegrationRuntimeSelfHostedRead(d *schema.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).DataFactory.IntegrationRuntimesClient
    ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d)
    defer cancel()

    id, err := parse.DataFactoryIntegrationRuntimeID(d.Id())
    if err != nil {
        return err
    }
    resourceGroup := id.ResourceGroup
    factoryName := id.DataFactory
    name := id.Name

    resp, err := client.Get(ctx, resourceGroup, factoryName, name, "")
    if err != nil {
        if utils.ResponseWasNotFound(resp.Response) {
            d.SetId("")
            return nil
        }

        return fmt.Errorf("Error retrieving Data Factory Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q): %+v", name, resourceGroup, factoryName, err)
    }

    d.Set("name", name)
    d.Set("data_factory_name", factoryName)
    d.Set("resource_group_name", resourceGroup)

    selfHostedIntegrationRuntime, convertSuccess := resp.Properties.AsSelfHostedIntegrationRuntime()

    if !convertSuccess {
        return fmt.Errorf("Error converting integration runtime to Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q)", name, resourceGroup, factoryName)
    }

    if selfHostedIntegrationRuntime.Description != nil {
        d.Set("description", selfHostedIntegrationRuntime.Description)
    }

    if props := selfHostedIntegrationRuntime.SelfHostedIntegrationRuntimeTypeProperties; props != nil {
        // LinkedInfo BasicLinkedIntegrationRuntimeType
        if linkedInfo := props.LinkedInfo; linkedInfo != nil {
            rbacAuthorization, _ := linkedInfo.AsLinkedIntegrationRuntimeRbacAuthorization()
            if rbacAuthorization != nil {
                if err := d.Set("rbac_authorization", schema.NewSet(resourceArmDataFactoryIntegrationRuntimeSelfHostedRbacAuthorizationHash, flattenAzureRmDataFactoryIntegrationRuntimeSelfHostedTypePropertiesRbacAuthorization(rbacAuthorization))); err != nil {
                    return fmt.Errorf("Error setting `rbac_authorization`: %#v", err)
                }
            }
        }
        return nil
    }

    respKey, errKey := client.ListAuthKeys(ctx, resourceGroup, factoryName, name)
    if errKey != nil {
        if utils.ResponseWasNotFound(respKey.Response) {
            d.SetId("")
            return nil
        }

        return fmt.Errorf("Error retrieving Data Factory Self-Hosted Integration Runtime %q Auth Keys (Resource Group %q, Data Factory %q): %+v", name, resourceGroup, factoryName, errKey)
    }

    d.Set("auth_key_1", respKey.AuthKey1)
    d.Set("auth_key_2", respKey.AuthKey2)

    return nil
}

func resourceArmDataFactoryIntegrationRuntimeSelfHostedDelete(d *schema.ResourceData, meta interface{}) error {
    client := meta.(*clients.Client).DataFactory.IntegrationRuntimesClient
    ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d)
    defer cancel()

    id, err := parse.DataFactoryIntegrationRuntimeID(d.Id())
    if err != nil {
        return err
    }
    resourceGroup := id.ResourceGroup
    factoryName := id.DataFactory
    name := id.Name

    response, err := client.Delete(ctx, resourceGroup, factoryName, name)
    if err != nil {
        if !utils.ResponseWasNotFound(response) {
            return fmt.Errorf("Error deleting Data Factory Self-Hosted Integration Runtime %q (Resource Group %q, Data Factory %q): %+v", name, resourceGroup, factoryName, err)
        }
    }
    return nil
}

func expandAzureRmDataFactoryIntegrationRuntimeSelfHostedTypeProperties(d *schema.ResourceData) *datafactory.SelfHostedIntegrationRuntimeTypeProperties {
    if _, ok := d.GetOk("rbac_authorization"); ok {
        rbacAuthorization := d.Get("rbac_authorization").(*schema.Set).List()
        rbacConfig := rbacAuthorization[0].(map[string]interface{})
        rbac := rbacConfig["resource_id"].(string)
        linkedInfo := &datafactory.SelfHostedIntegrationRuntimeTypeProperties{
            LinkedInfo: &datafactory.LinkedIntegrationRuntimeRbacAuthorization{
                ResourceID:        &rbac,
                AuthorizationType: datafactory.AuthorizationTypeRBAC,
            },
        }
        return linkedInfo
    }
    return nil
}

func flattenAzureRmDataFactoryIntegrationRuntimeSelfHostedTypePropertiesRbacAuthorization(input *datafactory.LinkedIntegrationRuntimeRbacAuthorization) []interface{} {
    result := make(map[string]interface{})
    result["resource_id"] = *input.ResourceID

    return []interface{}{result}
}

func resourceArmDataFactoryIntegrationRuntimeSelfHostedRbacAuthorizationHash(v interface{}) int {
    var buf bytes.Buffer

    if m, ok := v.(map[string]interface{}); ok {
        if v, ok := m["resource_id"]; ok {
            buf.WriteString(fmt.Sprintf("%s-", v.(string)))
        }
    }

    return hashcode.String(buf.String())
}
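
Not part of this diff: new resources are normally exercised through an acceptance test. The following is a minimal sketch of a basic test configuration for the new resource; the package name, helper name, and the plain `rInt`/`location` parameters are placeholders rather than this repository's acceptance-test plumbing.

package datafactory_test

import "fmt"

// testAccDataFactoryIntegrationRuntimeSelfHostedBasicConfig is a hypothetical helper
// that renders a minimal config: a resource group, a data factory, and the new
// self-hosted integration runtime with only its three required arguments.
func testAccDataFactoryIntegrationRuntimeSelfHostedBasicConfig(rInt int, location string) string {
    return fmt.Sprintf(`
provider "azurerm" {
  features {}
}

resource "azurerm_resource_group" "test" {
  name     = "acctestRG-df-%d"
  location = "%s"
}

resource "azurerm_data_factory" "test" {
  name                = "acctestdf%d"
  location            = azurerm_resource_group.test.location
  resource_group_name = azurerm_resource_group.test.name
}

resource "azurerm_data_factory_integration_runtime_self_hosted" "test" {
  name                = "acctestSIR%d"
  resource_group_name = azurerm_resource_group.test.name
  data_factory_name   = azurerm_data_factory.test.name
}
`, rInt, location, rInt, rInt)
}

Since `auth_key_1` and `auth_key_2` are computed, a configuration only needs the three required arguments; `rbac_authorization` is optional and only comes into play when linking to a runtime in another data factory.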
@@ -0,0 +1,34 @@
package parse

import (
"fmt"

"github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure"
)

type DataFactoryIntegrationRuntimeId struct {
    ResourceGroup string
    Name          string
    DataFactory   string
}

func DataFactoryIntegrationRuntimeID(input string) (*DataFactoryIntegrationRuntimeId, error) {
    id, err := azure.ParseAzureResourceID(input)
    if err != nil {
        return nil, fmt.Errorf("[ERROR] Unable to parse Data Factory Integration Runtime ID %q: %+v", input, err)
    }

    dataFactoryIntegrationRuntime := DataFactoryIntegrationRuntimeId{
        ResourceGroup: id.ResourceGroup,
    }

    if dataFactoryIntegrationRuntime.DataFactory, err = id.PopSegment("factories"); err != nil {
        return nil, err
    }

    if dataFactoryIntegrationRuntime.Name, err = id.PopSegment("integrationruntimes"); err != nil {
        return nil, err
    }

    return &dataFactoryIntegrationRuntime, nil
}
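
For context, this parser backs the importer wired into the resource above, so the ID it accepts is what users pass to `terraform import`. A minimal sketch of calling it directly (the subscription GUID and resource names are hypothetical; as a testable example it would live in a _test.go file alongside the parser):

package parse

import "fmt"

// ExampleDataFactoryIntegrationRuntimeID shows the ID layout the parser accepts;
// the segment casing must match the test cases below.
func ExampleDataFactoryIntegrationRuntimeID() {
    id := "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-rg/providers/Microsoft.DataFactory/factories/example-df/integrationruntimes/example-ir"

    parsed, err := DataFactoryIntegrationRuntimeID(id)
    if err != nil {
        panic(err)
    }

    fmt.Println(parsed.ResourceGroup, parsed.DataFactory, parsed.Name)
    // Output: example-rg example-df example-ir
}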
@@ -0,0 +1,61 @@
package parse

import "testing"

func TestParseDataFactoryIntegrationRuntimeID(t *testing.T) {
    testData := []struct {
        Name     string
        Input    string
        Expected *DataFactoryIntegrationRuntimeId
    }{
        {
            Name:     "Empty",
            Input:    "",
            Expected: nil,
        },
        {
            Name:     "No Data Factory segment",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/",
            Expected: nil,
        },
        {
            Name:     "No Integration Runtime name",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/",
            Expected: nil,
        },
        {
            Name:     "Case incorrect in path element",
            Input:    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/microsoft.dataFactory/factories/factory1/Integrationruntimes/integrationRuntimeName",
            Expected: nil,
        },
        {
            Name:  "Valid",
            Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myGroup1/providers/Microsoft.DataFactory/factories/factory1/integrationruntimes/integrationRuntimeName",
            Expected: &DataFactoryIntegrationRuntimeId{
                ResourceGroup: "myGroup1",
                Name:          "integrationRuntimeName",
                DataFactory:   "factory1",
            },
        },
    }
    for _, v := range testData {
        t.Logf("[DEBUG] Testing %q", v.Name)

        actual, err := DataFactoryIntegrationRuntimeID(v.Input)
        if err != nil {
            if v.Expected == nil {
                continue
            }

            t.Fatalf("Expected a value but got an error: %s", err)
        }

        if actual.Name != v.Expected.Name {
            t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name)
        }

        if actual.ResourceGroup != v.Expected.ResourceGroup {
            t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup)
        }
    }
}
azurerm/internal/services/datafactory/registration.go (1 addition, 0 deletions)
@@ -33,6 +33,7 @@ func (r Registration) SupportedResources() map[string]*schema.Resource {
"azurerm_data_factory_dataset_postgresql": resourceArmDataFactoryDatasetPostgreSQL(),
"azurerm_data_factory_dataset_sql_server_table": resourceArmDataFactoryDatasetSQLServerTable(),
"azurerm_data_factory_integration_runtime_managed": resourceArmDataFactoryIntegrationRuntimeManaged(),
"azurerm_data_factory_integration_runtime_self_hosted": resourceArmDataFactoryIntegrationRuntimeSelfHosted(),
"azurerm_data_factory_linked_service_data_lake_storage_gen2": resourceArmDataFactoryLinkedServiceDataLakeStorageGen2(),
"azurerm_data_factory_linked_service_key_vault": resourceArmDataFactoryLinkedServiceKeyVault(),
"azurerm_data_factory_linked_service_mysql": resourceArmDataFactoryLinkedServiceMySQL(),