diff --git a/azurerm/internal/clients/client.go b/azurerm/internal/clients/client.go index 091e8abbbe14..fe72c41e485e 100644 --- a/azurerm/internal/clients/client.go +++ b/azurerm/internal/clients/client.go @@ -26,6 +26,7 @@ import ( databricks "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks/client" datafactory "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory/client" datalake "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake/client" + datashare "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/client" devspace "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace/client" devtestlabs "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs/client" dns "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns/client" @@ -105,6 +106,7 @@ type Client struct { DataBricks *databricks.Client DataFactory *datafactory.Client Datalake *datalake.Client + DataShare *datashare.Client DevSpace *devspace.Client DevTestLabs *devtestlabs.Client Dns *dns.Client @@ -185,6 +187,7 @@ func (client *Client) Build(ctx context.Context, o *common.ClientOptions) error client.DataBricks = databricks.NewClient(o) client.DataFactory = datafactory.NewClient(o) client.Datalake = datalake.NewClient(o) + client.DataShare = datashare.NewClient(o) client.DevSpace = devspace.NewClient(o) client.DevTestLabs = devtestlabs.NewClient(o) client.Dns = dns.NewClient(o) diff --git a/azurerm/internal/provider/services.go b/azurerm/internal/provider/services.go index 944a42896853..a84eee7a0aa0 100644 --- a/azurerm/internal/provider/services.go +++ b/azurerm/internal/provider/services.go @@ -22,6 +22,7 @@ import ( "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/databricks" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datafactory" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datalake" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devspace" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/devtestlabs" "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/dns" @@ -98,6 +99,7 @@ func SupportedServices() []common.ServiceRegistration { datafactory.Registration{}, datalake.Registration{}, databasemigration.Registration{}, + datashare.Registration{}, devspace.Registration{}, devtestlabs.Registration{}, dns.Registration{}, diff --git a/azurerm/internal/services/datashare/client/client.go b/azurerm/internal/services/datashare/client/client.go new file mode 100644 index 000000000000..1292d3cfd94e --- /dev/null +++ b/azurerm/internal/services/datashare/client/client.go @@ -0,0 +1,19 @@ +package client + +import ( + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/common" +) + +type Client struct { + AccountClient *datashare.AccountsClient +} + +func NewClient(o *common.ClientOptions) *Client { + accountClient := datashare.NewAccountsClientWithBaseURI(o.ResourceManagerEndpoint, o.SubscriptionId) + 
o.ConfigureClient(&accountClient.Client, o.ResourceManagerAuthorizer) + + return &Client{ + AccountClient: &accountClient, + } +} diff --git a/azurerm/internal/services/datashare/data_source_data_share_account.go b/azurerm/internal/services/datashare/data_source_data_share_account.go new file mode 100644 index 000000000000..8d467b8fab2b --- /dev/null +++ b/azurerm/internal/services/datashare/data_source_data_share_account.go @@ -0,0 +1,89 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func dataSourceDataShareAccount() *schema.Resource { + return &schema.Resource{ + Read: dataSourceArmDataShareAccountRead, + + Timeouts: &schema.ResourceTimeout{ + Read: schema.DefaultTimeout(5 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validate.DataShareAccountName(), + }, + + "resource_group_name": azure.SchemaResourceGroupNameForDataSource(), + + "identity": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Computed: true, + }, + "principal_id": { + Type: schema.TypeString, + Computed: true, + }, + "tenant_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + "tags": tags.SchemaDataSource(), + }, + } +} + +func dataSourceArmDataShareAccountRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.AccountClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("reading DataShare Account %q (Resource Group %q): ID is empty or nil", name, resourceGroup) + } + + d.SetId(*resp.ID) + d.Set("name", name) + d.Set("resource_group_name", resourceGroup) + if err := d.Set("identity", flattenAzureRmDataShareAccountIdentity(resp.Identity)); err != nil { + return fmt.Errorf("setting `identity`: %+v", err) + } + return tags.FlattenAndSet(d, resp.Tags) +} diff --git a/azurerm/internal/services/datashare/parse/data_share.go b/azurerm/internal/services/datashare/parse/data_share.go new file mode 100644 index 000000000000..83bc7c3404a1 --- /dev/null +++ b/azurerm/internal/services/datashare/parse/data_share.go @@ -0,0 +1,31 @@ +package parse + +import ( + "fmt" + + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" +) + +type DataShareAccountId struct { + ResourceGroup string + Name string +} + +func DataShareAccountID(input 
string) (*DataShareAccountId, error) { + id, err := azure.ParseAzureResourceID(input) + if err != nil { + return nil, fmt.Errorf("parsing DataShareAccount ID %q: %+v", input, err) + } + + dataShareAccount := DataShareAccountId{ + ResourceGroup: id.ResourceGroup, + } + if dataShareAccount.Name, err = id.PopSegment("accounts"); err != nil { + return nil, err + } + if err := id.ValidateNoEmptySegments(input); err != nil { + return nil, err + } + + return &dataShareAccount, nil +} diff --git a/azurerm/internal/services/datashare/parse/data_share_test.go b/azurerm/internal/services/datashare/parse/data_share_test.go new file mode 100644 index 000000000000..3c14595b2d91 --- /dev/null +++ b/azurerm/internal/services/datashare/parse/data_share_test.go @@ -0,0 +1,72 @@ +package parse + +import ( + "testing" +) + +func TestDataShareAccountID(t *testing.T) { + testData := []struct { + Name string + Input string + Expected *DataShareAccountId + }{ + { + Name: "Empty", + Input: "", + Expected: nil, + }, + { + Name: "No Resource Groups Segment", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000", + Expected: nil, + }, + { + Name: "No Resource Groups Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/", + Expected: nil, + }, + { + Name: "Resource Group ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/foo/", + Expected: nil, + }, + { + Name: "Missing Account Value", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/", + Expected: nil, + }, + { + Name: "Datashare account ID", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/accounts/account1", + Expected: &DataShareAccountId{ + Name: "account1", + ResourceGroup: "resGroup1", + }, + }, + { + Name: "Wrong Casing", + Input: "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/resGroup1/providers/Microsoft.DataShare/Accounts/account1", + Expected: nil, + }, + } + + for _, v := range testData { + t.Logf("[DEBUG] Testing %q..", v.Name) + + actual, err := DataShareAccountID(v.Input) + if err != nil { + if v.Expected == nil { + continue + } + t.Fatalf("Expected a value but got an error: %s", err) + } + + if actual.ResourceGroup != v.Expected.ResourceGroup { + t.Fatalf("Expected %q but got %q for ResourceGroup", v.Expected.ResourceGroup, actual.ResourceGroup) + } + + if actual.Name != v.Expected.Name { + t.Fatalf("Expected %q but got %q for Name", v.Expected.Name, actual.Name) + } + } +} diff --git a/azurerm/internal/services/datashare/registration.go b/azurerm/internal/services/datashare/registration.go new file mode 100644 index 000000000000..0ddb1dd85d4e --- /dev/null +++ b/azurerm/internal/services/datashare/registration.go @@ -0,0 +1,31 @@ +package datashare + +import "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + +type Registration struct{} + +// Name is the name of this Service +func (r Registration) Name() string { + return "Data Share" +} + +// WebsiteCategories returns a list of categories which can be used for the sidebar +func (r Registration) WebsiteCategories() []string { + return []string{ + "Data Share", + } +} + +// SupportedDataSources returns the supported Data Sources supported by this Service +func (r Registration) SupportedDataSources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_data_share_account": dataSourceDataShareAccount(), + } +} + +// SupportedResources 
returns the supported Resources supported by this Service +func (r Registration) SupportedResources() map[string]*schema.Resource { + return map[string]*schema.Resource{ + "azurerm_data_share_account": resourceArmDataShareAccount(), + } +} diff --git a/azurerm/internal/services/datashare/resource_arm_data_share_account.go b/azurerm/internal/services/datashare/resource_arm_data_share_account.go new file mode 100644 index 000000000000..c1828cac2c7b --- /dev/null +++ b/azurerm/internal/services/datashare/resource_arm_data_share_account.go @@ -0,0 +1,235 @@ +package datashare + +import ( + "fmt" + "log" + "time" + + "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/azure" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/helpers/tf" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/location" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/validate" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tags" + azSchema "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/tf/schema" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/timeouts" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func resourceArmDataShareAccount() *schema.Resource { + return &schema.Resource{ + Create: resourceArmDataShareAccountCreate, + Read: resourceArmDataShareAccountRead, + Update: resourceArmDataShareAccountUpdate, + Delete: resourceArmDataShareAccountDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(30 * time.Minute), + Read: schema.DefaultTimeout(5 * time.Minute), + Update: schema.DefaultTimeout(30 * time.Minute), + Delete: schema.DefaultTimeout(30 * time.Minute), + }, + + Importer: azSchema.ValidateResourceIDPriorToImport(func(id string) error { + _, err := parse.DataShareAccountID(id) + return err + }), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: validate.DataShareAccountName(), + }, + + "resource_group_name": azure.SchemaResourceGroupName(), + + "location": azure.SchemaLocation(), + + "identity": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{ + string(datashare.SystemAssigned), + }, false), + }, + "principal_id": { + Type: schema.TypeString, + Computed: true, + }, + "tenant_id": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + + // the api will save and return the tag keys in lowercase, so an extra validation of the key is all in lowercase is added + // issue has been created https://github.com/Azure/azure-rest-api-specs/issues/9280 + "tags": tags.SchemaEnforceLowerCaseKeys(), + }, + } +} +func resourceArmDataShareAccountCreate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.AccountClient + ctx, cancel := 
timeouts.ForCreate(meta.(*clients.Client).StopContext, d) + defer cancel() + + name := d.Get("name").(string) + resourceGroup := d.Get("resource_group_name").(string) + + existing, err := client.Get(ctx, resourceGroup, name) + if err != nil { + if !utils.ResponseWasNotFound(existing.Response) { + return fmt.Errorf("checking for present of existing DataShare Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + } + if existing.ID != nil && *existing.ID != "" { + return tf.ImportAsExistsError("azurerm_data_share_account", *existing.ID) + } + + account := datashare.Account{ + Name: utils.String(name), + Location: utils.String(location.Normalize(d.Get("location").(string))), + Identity: expandAzureRmDataShareAccountIdentity(d.Get("identity").([]interface{})), + Tags: tags.Expand(d.Get("tags").(map[string]interface{})), + } + + future, err := client.Create(ctx, resourceGroup, name, account) + if err != nil { + return fmt.Errorf("creating DataShare Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting on creating future for DataShare Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + resp, err := client.Get(ctx, resourceGroup, name) + if err != nil { + return fmt.Errorf("retrieving DataShare Account %q (Resource Group %q): %+v", name, resourceGroup, err) + } + + if resp.ID == nil || *resp.ID == "" { + return fmt.Errorf("reading DataShare Account %q (Resource Group %q): ID is empty or nil", name, resourceGroup) + } + + d.SetId(*resp.ID) + + return resourceArmDataShareAccountRead(d, meta) +} + +func resourceArmDataShareAccountRead(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.AccountClient + ctx, cancel := timeouts.ForRead(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareAccountID(d.Id()) + if err != nil { + return err + } + + resp, err := client.Get(ctx, id.ResourceGroup, id.Name) + if err != nil { + if utils.ResponseWasNotFound(resp.Response) { + log.Printf("[INFO] DataShare %q does not exist - removing from state", d.Id()) + d.SetId("") + return nil + } + return fmt.Errorf("retrieving DataShare Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + d.Set("name", id.Name) + d.Set("resource_group_name", id.ResourceGroup) + d.Set("location", location.NormalizeNilable(resp.Location)) + if err := d.Set("identity", flattenAzureRmDataShareAccountIdentity(resp.Identity)); err != nil { + return fmt.Errorf("setting `identity`: %+v", err) + } + + return tags.FlattenAndSet(d, resp.Tags) +} + +func resourceArmDataShareAccountUpdate(d *schema.ResourceData, meta interface{}) error { + client := meta.(*clients.Client).DataShare.AccountClient + ctx, cancel := timeouts.ForUpdate(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareAccountID(d.Id()) + if err != nil { + return err + } + + props := datashare.AccountUpdateParameters{} + + if d.HasChange("tags") { + props.Tags = tags.Expand(d.Get("tags").(map[string]interface{})) + } + + _, err = client.Update(ctx, id.ResourceGroup, id.Name, props) + if err != nil { + return fmt.Errorf("updating DataShare Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + return resourceArmDataShareAccountRead(d, meta) +} + +func resourceArmDataShareAccountDelete(d *schema.ResourceData, meta interface{}) error { + client := 
meta.(*clients.Client).DataShare.AccountClient + ctx, cancel := timeouts.ForDelete(meta.(*clients.Client).StopContext, d) + defer cancel() + + id, err := parse.DataShareAccountID(d.Id()) + if err != nil { + return err + } + + future, err := client.Delete(ctx, id.ResourceGroup, id.Name) + if err != nil { + return fmt.Errorf("deleting DataShare Account %q (Resource Group %q): %+v", id.Name, id.ResourceGroup, err) + } + + if err = future.WaitForCompletionRef(ctx, client.Client); err != nil { + return fmt.Errorf("waiting for DataShare Account %q (Resource Group %q) to be deleted: %+v", id.Name, id.ResourceGroup, err) + } + return nil +} + +func expandAzureRmDataShareAccountIdentity(input []interface{}) *datashare.Identity { + identity := input[0].(map[string]interface{}) + return &datashare.Identity{ + Type: datashare.Type(identity["type"].(string)), + } +} + +func flattenAzureRmDataShareAccountIdentity(identity *datashare.Identity) []interface{} { + if identity == nil { + return make([]interface{}, 0) + } + + var principalId, tenantId string + + if identity.PrincipalID != nil { + principalId = *identity.PrincipalID + } + if identity.TenantID != nil { + tenantId = *identity.TenantID + } + + return []interface{}{ + map[string]interface{}{ + "type": string(identity.Type), + "principal_id": principalId, + "tenant_id": tenantId, + }, + } +} diff --git a/azurerm/internal/services/datashare/tests/data_source_data_share_account_test.go b/azurerm/internal/services/datashare/tests/data_source_data_share_account_test.go new file mode 100644 index 000000000000..ca83c7d0eb6e --- /dev/null +++ b/azurerm/internal/services/datashare/tests/data_source_data_share_account_test.go @@ -0,0 +1,43 @@ +package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" +) + +func TestAccDataSourceAzureRMDataShareAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "data.azurerm_data_share_account", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDataShareAccount_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttr(data.ResourceName, "tags.%", "1"), + resource.TestCheckResourceAttr(data.ResourceName, "tags.env", "Test"), + resource.TestCheckResourceAttr(data.ResourceName, "identity.0.type", "SystemAssigned"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + }, + }) +} + +func testAccDataSourceDataShareAccount_basic(data acceptance.TestData) string { + config := testAccAzureRMDataShareAccount_complete(data) + return fmt.Sprintf(` +%s + +data "azurerm_data_share_account" "test" { + name = azurerm_data_share_account.test.name + resource_group_name = azurerm_resource_group.test.name +} +`, config) +} diff --git a/azurerm/internal/services/datashare/tests/resource_arm_data_share_account_test.go b/azurerm/internal/services/datashare/tests/resource_arm_data_share_account_test.go new file mode 100644 index 000000000000..292b6229aa9e --- /dev/null +++ b/azurerm/internal/services/datashare/tests/resource_arm_data_share_account_test.go @@ -0,0 +1,250 @@ 
+package tests + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/terraform" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/acceptance" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/clients" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/internal/services/datashare/parse" + "github.com/terraform-providers/terraform-provider-azurerm/azurerm/utils" +) + +func TestAccAzureRMDataShareAccount_basic(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareAccount_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareAccount_requiresImport(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareAccount_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + ), + }, + data.RequiresImportErrorStep(testAccAzureRMDataShareAccount_requiresImport), + }, + }) +} + +func TestAccAzureRMDataShareAccount_complete(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareAccount_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + data.ImportStep(), + }, + }) +} + +func TestAccAzureRMDataShareAccount_update(t *testing.T) { + data := acceptance.BuildTestData(t, "azurerm_data_share_account", "test") + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acceptance.PreCheck(t) }, + Providers: acceptance.SupportedProviders, + CheckDestroy: testCheckAzureRMDataShareAccountDestroy, + Steps: []resource.TestStep{ + { + Config: testAccAzureRMDataShareAccount_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataShareAccount_complete(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, 
"identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + data.ImportStep(), + { + Config: testAccAzureRMDataShareAccount_update(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + { + Config: testAccAzureRMDataShareAccount_basic(data), + Check: resource.ComposeTestCheckFunc( + testCheckAzureRMDataShareAccountExists(data.ResourceName), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.principal_id"), + resource.TestCheckResourceAttrSet(data.ResourceName, "identity.0.tenant_id"), + ), + }, + data.ImportStep(), + data.ImportStep(), + }, + }) +} + +func testCheckAzureRMDataShareAccountExists(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.AccountClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("dataShare Account not found: %s", resourceName) + } + id, err := parse.DataShareAccountID(rs.Primary.ID) + if err != nil { + return err + } + if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: data_share account %q does not exist", id.Name) + } + return fmt.Errorf("bad: Get on DataShareAccountClient: %+v", err) + } + return nil + } +} + +func testCheckAzureRMDataShareAccountDestroy(s *terraform.State) error { + client := acceptance.AzureProvider.Meta().(*clients.Client).DataShare.AccountClient + ctx := acceptance.AzureProvider.Meta().(*clients.Client).StopContext + + for _, rs := range s.RootModule().Resources { + if rs.Type != "azurerm_data_share_account" { + continue + } + id, err := parse.DataShareAccountID(rs.Primary.ID) + if err != nil { + return err + } + if resp, err := client.Get(ctx, id.ResourceGroup, id.Name); err != nil { + if !utils.ResponseWasNotFound(resp.Response) { + return fmt.Errorf("bad: Get on data_share.accountClient: %+v", err) + } + } + return nil + } + return nil +} + +func testAccAzureRMDataShareAccount_template(data acceptance.TestData) string { + return fmt.Sprintf(` +provider "azurerm" { + features {} +} + +resource "azurerm_resource_group" "test" { + name = "acctest-datashare-%d" + location = "%s" +} +`, data.RandomInteger, data.Locations.Primary) +} + +func testAccAzureRMDataShareAccount_basic(data acceptance.TestData) string { + template := testAccAzureRMDataShareAccount_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMDataShareAccount_requiresImport(data acceptance.TestData) string { + config := testAccAzureRMDataShareAccount_basic(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "import" { + name = azurerm_data_share_account.test.name + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } +} +`, config) +} + +func testAccAzureRMDataShareAccount_complete(data 
acceptance.TestData) string { + template := testAccAzureRMDataShareAccount_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Test" + } +} +`, template, data.RandomInteger) +} + +func testAccAzureRMDataShareAccount_update(data acceptance.TestData) string { + template := testAccAzureRMDataShareAccount_template(data) + return fmt.Sprintf(` +%s + +resource "azurerm_data_share_account" "test" { + name = "acctest-DSA-%d" + location = azurerm_resource_group.test.location + resource_group_name = azurerm_resource_group.test.name + identity { + type = "SystemAssigned" + } + + tags = { + env = "Stage" + } +} +`, template, data.RandomInteger) +} diff --git a/azurerm/internal/services/datashare/validate/data_share.go b/azurerm/internal/services/datashare/validate/data_share.go new file mode 100644 index 000000000000..4bd6971dfe22 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/data_share.go @@ -0,0 +1,14 @@ +package validate + +import ( + "regexp" + + "github.com/hashicorp/terraform-plugin-sdk/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/helper/validation" +) + +func DataShareAccountName() schema.SchemaValidateFunc { + return validation.StringMatch( + regexp.MustCompile(`^[^<>%&:\\?/#*$^();,.\|+={}\[\]!~@]{3,90}$`), `Data share account name should have length of 3 - 90, and cannot contain <>%&:\?/#*$^();,.|+={}[]!~@.`, + ) +} diff --git a/azurerm/internal/services/datashare/validate/data_share_test.go b/azurerm/internal/services/datashare/validate/data_share_test.go new file mode 100644 index 000000000000..7e3d706bef34 --- /dev/null +++ b/azurerm/internal/services/datashare/validate/data_share_test.go @@ -0,0 +1,57 @@ +package validate + +import "testing" + +func TestDataShareAccountName(t *testing.T) { + tests := []struct { + name string + input string + valid bool + }{ + { + name: "Invalid Character 1", + input: "DC\\", + valid: false, + }, + { + name: "Invalid Character 2", + input: "[abc]", + valid: false, + }, + { + name: "Valid Account Name", + input: "acc-test", + valid: true, + }, + { + name: "Invalid Character 3", + input: "test&", + valid: false, + }, + { + name: "Too Few Character", + input: "ab", + valid: false, + }, + { + name: "Valid Account Name 2", + input: "aa-BB_88", + valid: true, + }, + { + name: "Valid Account Name 3", + input: "aac-", + valid: true, + }, + } + var validationFunction = DataShareAccountName() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validationFunction(tt.input, "") + valid := err == nil + if valid != tt.valid { + t.Errorf("Expected valid status %t but got %t for input %s", tt.valid, valid, tt.input) + } + }) + } +} diff --git a/azurerm/internal/tags/schema.go b/azurerm/internal/tags/schema.go index 71b4e065dfe6..1438b11a8a64 100644 --- a/azurerm/internal/tags/schema.go +++ b/azurerm/internal/tags/schema.go @@ -40,3 +40,15 @@ func Schema() *schema.Schema { }, } } + +// Schema returns the Schema used for Tags +func SchemaEnforceLowerCaseKeys() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeMap, + Optional: true, + ValidateFunc: EnforceLowerCaseKeys, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + } +} diff --git a/azurerm/internal/tags/validation.go b/azurerm/internal/tags/validation.go index 1bca2c7d3e61..216fbf2f15af 100644 --- 
a/azurerm/internal/tags/validation.go +++ b/azurerm/internal/tags/validation.go @@ -1,6 +1,9 @@ package tags -import "fmt" +import ( + "fmt" + "strings" +) func Validate(v interface{}, _ string) (warnings []string, errors []error) { tagsMap := v.(map[string]interface{}) @@ -35,3 +38,39 @@ func TagValueToString(v interface{}) (string, error) { return "", fmt.Errorf("unknown tag type %T in tag value", value) } } + +func EnforceLowerCaseKeys(i interface{}, k string) (warnings []string, errors []error) { + tagsMap, ok := i.(map[string]interface{}) + if !ok { + errors = append(errors, fmt.Errorf("expected type of %s to be map", k)) + return warnings, errors + } + + if len(tagsMap) > 50 { + errors = append(errors, fmt.Errorf("a maximum of 50 tags can be applied to each ARM resource")) + } + + for key, value := range tagsMap { + if len(key) > 512 { + errors = append(errors, fmt.Errorf("the maximum length for a tag key is 512 characters: %q has %d characters", key, len(key))) + return warnings, errors + } + + if strings.ToLower(key) != key { + errors = append(errors, fmt.Errorf("a tag key %q expected to be all in lowercase", key)) + return warnings, errors + } + + v, err := TagValueToString(value) + if err != nil { + errors = append(errors, err) + return warnings, errors + } + if len(v) > 256 { + errors = append(errors, fmt.Errorf("the maximum length for a tag value is 256 characters: the value for %q has %d characters", key, len(v))) + return warnings, errors + } + } + + return warnings, errors +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/accounts.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/accounts.go new file mode 100644 index 000000000000..379f0d9c1aad --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/accounts.go @@ -0,0 +1,589 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// AccountsClient is the creates a Microsoft.DataShare management client. +type AccountsClient struct { + BaseClient +} + +// NewAccountsClient creates an instance of the AccountsClient client. +func NewAccountsClient(subscriptionID string) AccountsClient { + return NewAccountsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewAccountsClientWithBaseURI creates an instance of the AccountsClient client using a custom endpoint. Use this +// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). 
+func NewAccountsClientWithBaseURI(baseURI string, subscriptionID string) AccountsClient { + return AccountsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// account - the account payload. +func (client AccountsClient) Create(ctx context.Context, resourceGroupName string, accountName string, account Account) (result AccountsCreateFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.Create") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: account, + Constraints: []validation.Constraint{{Target: "account.Identity", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { + return result, validation.NewError("datashare.AccountsClient", "Create", err.Error()) + } + + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, account) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Create", nil, "Failure preparing request") + return + } + + result, err = client.CreateSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Create", result.Response(), "Failure sending request") + return + } + + return +} + +// CreatePreparer prepares the Create request. +func (client AccountsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, account Account) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}", pathParameters), + autorest.WithJSON(account), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) CreateSender(req *http.Request) (future AccountsCreateFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client AccountsClient) CreateResponder(resp *http.Response) (result Account, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete deleteAccount +// Parameters: +// resourceGroupName - the resource group name. 
+// accountName - the name of the share account. +func (client AccountsClient) Delete(ctx context.Context, resourceGroupName string, accountName string) (result AccountsDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client AccountsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) DeleteSender(req *http.Request) (future AccountsDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client AccountsClient) DeleteResponder(resp *http.Response) (result OperationResponse, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Get get an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. 
+func (client AccountsClient) Get(ctx context.Context, resourceGroupName string, accountName string) (result Account, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client AccountsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client AccountsClient) GetResponder(resp *http.Response) (result Account, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByResourceGroup list Accounts in ResourceGroup +// Parameters: +// resourceGroupName - the resource group name. 
+// skipToken - continuation token +func (client AccountsClient) ListByResourceGroup(ctx context.Context, resourceGroupName string, skipToken string) (result AccountListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.al.Response.Response != nil { + sc = result.al.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByResourceGroupNextResults + req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "ListByResourceGroup", nil, "Failure preparing request") + return + } + + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.al.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "ListByResourceGroup", resp, "Failure sending request") + return + } + + result.al, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "ListByResourceGroup", resp, "Failure responding to request") + } + + return +} + +// ListByResourceGroupPreparer prepares the ListByResourceGroup request. +func (client AccountsClient) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) ListByResourceGroupSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always +// closes the http.Response Body. +func (client AccountsClient) ListByResourceGroupResponder(resp *http.Response) (result AccountList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByResourceGroupNextResults retrieves the next set of results, if any. 
+func (client AccountsClient) listByResourceGroupNextResults(ctx context.Context, lastResults AccountList) (result AccountList, err error) { + req, err := lastResults.accountListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.AccountsClient", "listByResourceGroupNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByResourceGroupSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.AccountsClient", "listByResourceGroupNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByResourceGroupResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "listByResourceGroupNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required. +func (client AccountsClient) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string, skipToken string) (result AccountListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.ListByResourceGroup") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByResourceGroup(ctx, resourceGroupName, skipToken) + return +} + +// ListBySubscription list Accounts in Subscription +// Parameters: +// skipToken - continuation token +func (client AccountsClient) ListBySubscription(ctx context.Context, skipToken string) (result AccountListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.ListBySubscription") + defer func() { + sc := -1 + if result.al.Response.Response != nil { + sc = result.al.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listBySubscriptionNextResults + req, err := client.ListBySubscriptionPreparer(ctx, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "ListBySubscription", nil, "Failure preparing request") + return + } + + resp, err := client.ListBySubscriptionSender(req) + if err != nil { + result.al.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "ListBySubscription", resp, "Failure sending request") + return + } + + result.al, err = client.ListBySubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "ListBySubscription", resp, "Failure responding to request") + } + + return +} + +// ListBySubscriptionPreparer prepares the ListBySubscription request. 
+func (client AccountsClient) ListBySubscriptionPreparer(ctx context.Context, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.DataShare/accounts", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListBySubscriptionSender sends the ListBySubscription request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) ListBySubscriptionSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListBySubscriptionResponder handles the response to the ListBySubscription request. The method always +// closes the http.Response Body. +func (client AccountsClient) ListBySubscriptionResponder(resp *http.Response) (result AccountList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listBySubscriptionNextResults retrieves the next set of results, if any. +func (client AccountsClient) listBySubscriptionNextResults(ctx context.Context, lastResults AccountList) (result AccountList, err error) { + req, err := lastResults.accountListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.AccountsClient", "listBySubscriptionNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListBySubscriptionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.AccountsClient", "listBySubscriptionNextResults", resp, "Failure sending next results request") + } + result, err = client.ListBySubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "listBySubscriptionNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListBySubscriptionComplete enumerates all values, automatically crossing page boundaries as required. +func (client AccountsClient) ListBySubscriptionComplete(ctx context.Context, skipToken string) (result AccountListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.ListBySubscription") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListBySubscription(ctx, skipToken) + return +} + +// Update patch an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// accountUpdateParameters - the account update parameters. 
+func (client AccountsClient) Update(ctx context.Context, resourceGroupName string, accountName string, accountUpdateParameters AccountUpdateParameters) (result Account, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountsClient.Update") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.UpdatePreparer(ctx, resourceGroupName, accountName, accountUpdateParameters) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Update", nil, "Failure preparing request") + return + } + + resp, err := client.UpdateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Update", resp, "Failure sending request") + return + } + + result, err = client.UpdateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsClient", "Update", resp, "Failure responding to request") + } + + return +} + +// UpdatePreparer prepares the Update request. +func (client AccountsClient) UpdatePreparer(ctx context.Context, resourceGroupName string, accountName string, accountUpdateParameters AccountUpdateParameters) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPatch(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}", pathParameters), + autorest.WithJSON(accountUpdateParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// UpdateSender sends the Update request. The method will close the +// http.Response Body if it receives an error. +func (client AccountsClient) UpdateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// UpdateResponder handles the response to the Update request. The method always +// closes the http.Response Body. +func (client AccountsClient) UpdateResponder(resp *http.Response) (result Account, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/client.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/client.go new file mode 100644 index 000000000000..03ccba7b5667 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/client.go @@ -0,0 +1,52 @@ +// Package datashare implements the Azure ARM Datashare service API version 2019-11-01. +// +// Creates a Microsoft.DataShare management client. +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "github.com/Azure/go-autorest/autorest" +) + +const ( + // DefaultBaseURI is the default URI used for the service Datashare + DefaultBaseURI = "https://management.azure.com" +) + +// BaseClient is the base client for Datashare. +type BaseClient struct { + autorest.Client + BaseURI string + SubscriptionID string +} + +// New creates an instance of the BaseClient client. +func New(subscriptionID string) BaseClient { + return NewWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewWithBaseURI creates an instance of the BaseClient client using a custom endpoint. Use this when interacting with +// an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewWithBaseURI(baseURI string, subscriptionID string) BaseClient { + return BaseClient{ + Client: autorest.NewClientWithUserAgent(UserAgent()), + BaseURI: baseURI, + SubscriptionID: subscriptionID, + } +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/consumerinvitations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/consumerinvitations.go new file mode 100644 index 000000000000..c40500a0418e --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/consumerinvitations.go @@ -0,0 +1,312 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// ConsumerInvitationsClient is the creates a Microsoft.DataShare management client. +type ConsumerInvitationsClient struct { + BaseClient +} + +// NewConsumerInvitationsClient creates an instance of the ConsumerInvitationsClient client. 
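Before moving on to the consumer-side clients, a minimal editorial sketch (not part of the vendored file) of how the generated clients above are typically wired up and how AccountsClient.Update is called. It assumes the package also generates NewAccountsClient alongside the constructors shown here, that AccountUpdateParameters exposes a Tags map, and that credentials come from auth.NewAuthorizerFromEnvironment; all resource names are placeholders.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
	"github.com/Azure/go-autorest/autorest/azure/auth"
	"github.com/Azure/go-autorest/autorest/to"
)

// updateAccountTags patches the tags on an existing Data Share account.
func updateAccountTags(subscriptionID, resourceGroup, accountName string) error {
	// NewAccountsClient is assumed to follow the same generated pattern as the
	// other constructors in this package (New<X>Client / New<X>ClientWithBaseURI).
	client := datashare.NewAccountsClient(subscriptionID)

	// Pull credentials from the environment (service principal, MSI, ...).
	authorizer, err := auth.NewAuthorizerFromEnvironment()
	if err != nil {
		return err
	}
	client.Authorizer = authorizer

	// Tags is assumed to be the mutable field on AccountUpdateParameters.
	params := datashare.AccountUpdateParameters{
		Tags: map[string]*string{"env": to.StringPtr("staging")},
	}
	if _, err := client.Update(context.Background(), resourceGroup, accountName, params); err != nil {
		return fmt.Errorf("updating account %q: %+v", accountName, err)
	}
	return nil
}

func main() {
	// Placeholder identifiers; substitute real values.
	if err := updateAccountTags("00000000-0000-0000-0000-000000000000", "example-rg", "example-account"); err != nil {
		log.Fatal(err)
	}
}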
+func NewConsumerInvitationsClient(subscriptionID string) ConsumerInvitationsClient { + return NewConsumerInvitationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewConsumerInvitationsClientWithBaseURI creates an instance of the ConsumerInvitationsClient client using a custom +// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure +// stack). +func NewConsumerInvitationsClientWithBaseURI(baseURI string, subscriptionID string) ConsumerInvitationsClient { + return ConsumerInvitationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Get get an invitation +// Parameters: +// location - location of the invitation +// invitationID - an invitation id +func (client ConsumerInvitationsClient) Get(ctx context.Context, location string, invitationID string) (result ConsumerInvitation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerInvitationsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, location, invitationID) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client ConsumerInvitationsClient) GetPreparer(ctx context.Context, location string, invitationID string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "invitationId": autorest.Encode("path", invitationID), + "location": autorest.Encode("path", location), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/providers/Microsoft.DataShare/locations/{location}/consumerInvitations/{invitationId}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client ConsumerInvitationsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. 
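An editorial sketch of calling Get (same imports as the sketch above); the location and invitation GUID are placeholders, and the invitation's own fields are defined in models.go rather than reproduced here.

// showInvitation looks up a single consumer invitation by location and GUID.
// consumerClient is a datashare.ConsumerInvitationsClient with an Authorizer set.
func showInvitation(ctx context.Context, consumerClient datashare.ConsumerInvitationsClient) error {
	// Both arguments are placeholders: the Azure location the invitation was
	// issued in and the invitation's GUID.
	inv, err := consumerClient.Get(ctx, "eastus", "11111111-2222-3333-4444-555555555555")
	if err != nil {
		return fmt.Errorf("retrieving consumer invitation: %+v", err)
	}
	// The embedded response exposes the HTTP status of the call.
	fmt.Printf("invitation retrieved, status %d\n", inv.Response.Response.StatusCode)
	return nil
}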
+func (client ConsumerInvitationsClient) GetResponder(resp *http.Response) (result ConsumerInvitation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListInvitations lists invitations +// Parameters: +// skipToken - the continuation token +func (client ConsumerInvitationsClient) ListInvitations(ctx context.Context, skipToken string) (result ConsumerInvitationListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerInvitationsClient.ListInvitations") + defer func() { + sc := -1 + if result.cil.Response.Response != nil { + sc = result.cil.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listInvitationsNextResults + req, err := client.ListInvitationsPreparer(ctx, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "ListInvitations", nil, "Failure preparing request") + return + } + + resp, err := client.ListInvitationsSender(req) + if err != nil { + result.cil.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "ListInvitations", resp, "Failure sending request") + return + } + + result.cil, err = client.ListInvitationsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "ListInvitations", resp, "Failure responding to request") + } + + return +} + +// ListInvitationsPreparer prepares the ListInvitations request. +func (client ConsumerInvitationsClient) ListInvitationsPreparer(ctx context.Context, skipToken string) (*http.Request, error) { + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPath("/providers/Microsoft.DataShare/ListInvitations"), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListInvitationsSender sends the ListInvitations request. The method will close the +// http.Response Body if it receives an error. +func (client ConsumerInvitationsClient) ListInvitationsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) +} + +// ListInvitationsResponder handles the response to the ListInvitations request. The method always +// closes the http.Response Body. +func (client ConsumerInvitationsClient) ListInvitationsResponder(resp *http.Response) (result ConsumerInvitationList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listInvitationsNextResults retrieves the next set of results, if any. 
+func (client ConsumerInvitationsClient) listInvitationsNextResults(ctx context.Context, lastResults ConsumerInvitationList) (result ConsumerInvitationList, err error) { + req, err := lastResults.consumerInvitationListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "listInvitationsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListInvitationsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "listInvitationsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListInvitationsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "listInvitationsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListInvitationsComplete enumerates all values, automatically crossing page boundaries as required. +func (client ConsumerInvitationsClient) ListInvitationsComplete(ctx context.Context, skipToken string) (result ConsumerInvitationListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerInvitationsClient.ListInvitations") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListInvitations(ctx, skipToken) + return +} + +// RejectInvitation reject an invitation +// Parameters: +// location - location of the invitation +// invitation - an invitation payload +func (client ConsumerInvitationsClient) RejectInvitation(ctx context.Context, location string, invitation ConsumerInvitation) (result ConsumerInvitation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerInvitationsClient.RejectInvitation") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: invitation, + Constraints: []validation.Constraint{{Target: "invitation.ConsumerInvitationProperties", Name: validation.Null, Rule: true, + Chain: []validation.Constraint{{Target: "invitation.ConsumerInvitationProperties.InvitationID", Name: validation.Null, Rule: true, Chain: nil}}}}}}); err != nil { + return result, validation.NewError("datashare.ConsumerInvitationsClient", "RejectInvitation", err.Error()) + } + + req, err := client.RejectInvitationPreparer(ctx, location, invitation) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "RejectInvitation", nil, "Failure preparing request") + return + } + + resp, err := client.RejectInvitationSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "RejectInvitation", resp, "Failure sending request") + return + } + + result, err = client.RejectInvitationResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerInvitationsClient", "RejectInvitation", resp, "Failure responding to request") + } + + return +} + +// RejectInvitationPreparer prepares the RejectInvitation request. 
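An editorial sketch of driving the iterator returned by ListInvitationsComplete above; the NotDone/Value/NextWithContext helpers are the standard methods AutoRest generates for *Iterator types in models.go and are assumed here rather than shown.

// listAllInvitations walks every pending invitation for the signed-in consumer.
func listAllInvitations(ctx context.Context, consumerClient datashare.ConsumerInvitationsClient) error {
	// An empty skipToken starts at the first page; the iterator follows nextLinks itself.
	it, err := consumerClient.ListInvitationsComplete(ctx, "")
	if err != nil {
		return fmt.Errorf("listing consumer invitations: %+v", err)
	}
	for it.NotDone() {
		inv := it.Value() // a datashare.ConsumerInvitation
		_ = inv           // inspect fields from models.go as needed
		if err := it.NextWithContext(ctx); err != nil {
			return err
		}
	}
	return nil
}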
+func (client ConsumerInvitationsClient) RejectInvitationPreparer(ctx context.Context, location string, invitation ConsumerInvitation) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "location": autorest.Encode("path", location), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/providers/Microsoft.DataShare/locations/{location}/RejectInvitation", pathParameters), + autorest.WithJSON(invitation), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// RejectInvitationSender sends the RejectInvitation request. The method will close the +// http.Response Body if it receives an error. +func (client ConsumerInvitationsClient) RejectInvitationSender(req *http.Request) (*http.Response, error) { + return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) +} + +// RejectInvitationResponder handles the response to the RejectInvitation request. The method always +// closes the http.Response Body. +func (client ConsumerInvitationsClient) RejectInvitationResponder(resp *http.Response) (result ConsumerInvitation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/consumersourcedatasets.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/consumersourcedatasets.go new file mode 100644 index 000000000000..4a6e2fcf4975 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/consumersourcedatasets.go @@ -0,0 +1,163 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// ConsumerSourceDataSetsClient is the creates a Microsoft.DataShare management client. +type ConsumerSourceDataSetsClient struct { + BaseClient +} + +// NewConsumerSourceDataSetsClient creates an instance of the ConsumerSourceDataSetsClient client. 
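An editorial sketch of rejecting an invitation: fetching it first means the payload already carries the ConsumerInvitationProperties.InvitationID that the validation in RejectInvitation above requires.

// rejectInvitation declines an invitation the consumer does not want to accept.
func rejectInvitation(ctx context.Context, consumerClient datashare.ConsumerInvitationsClient, location, invitationID string) error {
	// Fetch the invitation so the required InvitationID is already populated.
	inv, err := consumerClient.Get(ctx, location, invitationID)
	if err != nil {
		return err
	}
	if _, err := consumerClient.RejectInvitation(ctx, location, inv); err != nil {
		return fmt.Errorf("rejecting invitation %q: %+v", invitationID, err)
	}
	return nil
}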
+func NewConsumerSourceDataSetsClient(subscriptionID string) ConsumerSourceDataSetsClient { + return NewConsumerSourceDataSetsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewConsumerSourceDataSetsClientWithBaseURI creates an instance of the ConsumerSourceDataSetsClient client using a +// custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, +// Azure stack). +func NewConsumerSourceDataSetsClientWithBaseURI(baseURI string, subscriptionID string) ConsumerSourceDataSetsClient { + return ConsumerSourceDataSetsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// ListByShareSubscription get source dataSets of a shareSubscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +// skipToken - continuation token +func (client ConsumerSourceDataSetsClient) ListByShareSubscription(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result ConsumerSourceDataSetListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerSourceDataSetsClient.ListByShareSubscription") + defer func() { + sc := -1 + if result.csdsl.Response.Response != nil { + sc = result.csdsl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareSubscriptionNextResults + req, err := client.ListByShareSubscriptionPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerSourceDataSetsClient", "ListByShareSubscription", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSubscriptionSender(req) + if err != nil { + result.csdsl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ConsumerSourceDataSetsClient", "ListByShareSubscription", resp, "Failure sending request") + return + } + + result.csdsl, err = client.ListByShareSubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerSourceDataSetsClient", "ListByShareSubscription", resp, "Failure responding to request") + } + + return +} + +// ListByShareSubscriptionPreparer prepares the ListByShareSubscription request. 
+func (client ConsumerSourceDataSetsClient) ListByShareSubscriptionPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/ConsumerSourceDataSets", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSubscriptionSender sends the ListByShareSubscription request. The method will close the +// http.Response Body if it receives an error. +func (client ConsumerSourceDataSetsClient) ListByShareSubscriptionSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareSubscriptionResponder handles the response to the ListByShareSubscription request. The method always +// closes the http.Response Body. +func (client ConsumerSourceDataSetsClient) ListByShareSubscriptionResponder(resp *http.Response) (result ConsumerSourceDataSetList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareSubscriptionNextResults retrieves the next set of results, if any. +func (client ConsumerSourceDataSetsClient) listByShareSubscriptionNextResults(ctx context.Context, lastResults ConsumerSourceDataSetList) (result ConsumerSourceDataSetList, err error) { + req, err := lastResults.consumerSourceDataSetListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ConsumerSourceDataSetsClient", "listByShareSubscriptionNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSubscriptionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ConsumerSourceDataSetsClient", "listByShareSubscriptionNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareSubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ConsumerSourceDataSetsClient", "listByShareSubscriptionNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareSubscriptionComplete enumerates all values, automatically crossing page boundaries as required. 
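The Complete variant defined next wraps the pager in an iterator; an editorial sketch of consuming it (placeholder resource names, iterator helpers assumed from models.go).

// listSourceDataSets enumerates the source data sets behind a share subscription.
func listSourceDataSets(ctx context.Context, client datashare.ConsumerSourceDataSetsClient) error {
	it, err := client.ListByShareSubscriptionComplete(ctx, "example-rg", "example-account", "example-share-subscription", "")
	if err != nil {
		return err
	}
	for it.NotDone() {
		ds := it.Value() // a datashare.ConsumerSourceDataSet
		_ = ds
		if err := it.NextWithContext(ctx); err != nil {
			return err
		}
	}
	return nil
}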
+func (client ConsumerSourceDataSetsClient) ListByShareSubscriptionComplete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result ConsumerSourceDataSetListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerSourceDataSetsClient.ListByShareSubscription") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShareSubscription(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/datasetmappings.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/datasetmappings.go new file mode 100644 index 000000000000..950ce174ae7b --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/datasetmappings.go @@ -0,0 +1,404 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// DataSetMappingsClient is the creates a Microsoft.DataShare management client. +type DataSetMappingsClient struct { + BaseClient +} + +// NewDataSetMappingsClient creates an instance of the DataSetMappingsClient client. +func NewDataSetMappingsClient(subscriptionID string) DataSetMappingsClient { + return NewDataSetMappingsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewDataSetMappingsClientWithBaseURI creates an instance of the DataSetMappingsClient client using a custom endpoint. +// Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewDataSetMappingsClientWithBaseURI(baseURI string, subscriptionID string) DataSetMappingsClient { + return DataSetMappingsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create a DataSetMapping +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the share subscription which will hold the data set sink. +// dataSetMappingName - the name of the data set mapping to be created. +// dataSetMapping - destination data set configuration details. 
+func (client DataSetMappingsClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, dataSetMappingName string, dataSetMapping BasicDataSetMapping) (result DataSetMappingModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingsClient.Create") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, dataSetMappingName, dataSetMapping) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Create", nil, "Failure preparing request") + return + } + + resp, err := client.CreateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Create", resp, "Failure sending request") + return + } + + result, err = client.CreateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Create", resp, "Failure responding to request") + } + + return +} + +// CreatePreparer prepares the Create request. +func (client DataSetMappingsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, dataSetMappingName string, dataSetMapping BasicDataSetMapping) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataSetMappingName": autorest.Encode("path", dataSetMappingName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}", pathParameters), + autorest.WithJSON(dataSetMapping), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetMappingsClient) CreateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client DataSetMappingsClient) CreateResponder(resp *http.Response) (result DataSetMappingModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete a DataSetMapping in a shareSubscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. 
+// shareSubscriptionName - the name of the shareSubscription. +// dataSetMappingName - the name of the dataSetMapping. +func (client DataSetMappingsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, dataSetMappingName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingsClient.Delete") + defer func() { + sc := -1 + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, dataSetMappingName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client DataSetMappingsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, dataSetMappingName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataSetMappingName": autorest.Encode("path", dataSetMappingName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetMappingsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client DataSetMappingsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get get a DataSetMapping in a shareSubscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +// dataSetMappingName - the name of the dataSetMapping. 
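An editorial sketch pairing the Get defined next with the Delete above; resource names are placeholders, and the concrete mapping inside the returned model is the polymorphic BasicDataSetMapping from models.go.

// inspectAndRemoveMapping reads a data set mapping and then deletes it.
func inspectAndRemoveMapping(ctx context.Context, client datashare.DataSetMappingsClient) error {
	model, err := client.Get(ctx, "example-rg", "example-account", "example-share-subscription", "example-mapping")
	if err != nil {
		return err
	}
	_ = model // the concrete mapping is the polymorphic BasicDataSetMapping in models.go

	// Delete is synchronous for data set mappings (200 or 204 per DeleteResponder above).
	if _, err := client.Delete(ctx, "example-rg", "example-account", "example-share-subscription", "example-mapping"); err != nil {
		return fmt.Errorf("deleting data set mapping: %+v", err)
	}
	return nil
}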
+func (client DataSetMappingsClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, dataSetMappingName string) (result DataSetMappingModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, dataSetMappingName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client DataSetMappingsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, dataSetMappingName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataSetMappingName": autorest.Encode("path", dataSetMappingName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings/{dataSetMappingName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetMappingsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client DataSetMappingsClient) GetResponder(resp *http.Response) (result DataSetMappingModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByShareSubscription list DataSetMappings in a share subscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the share subscription. 
+// skipToken - continuation token +func (client DataSetMappingsClient) ListByShareSubscription(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result DataSetMappingListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingsClient.ListByShareSubscription") + defer func() { + sc := -1 + if result.dsml.Response.Response != nil { + sc = result.dsml.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareSubscriptionNextResults + req, err := client.ListByShareSubscriptionPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "ListByShareSubscription", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSubscriptionSender(req) + if err != nil { + result.dsml.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "ListByShareSubscription", resp, "Failure sending request") + return + } + + result.dsml, err = client.ListByShareSubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "ListByShareSubscription", resp, "Failure responding to request") + } + + return +} + +// ListByShareSubscriptionPreparer prepares the ListByShareSubscription request. +func (client DataSetMappingsClient) ListByShareSubscriptionPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/dataSetMappings", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSubscriptionSender sends the ListByShareSubscription request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetMappingsClient) ListByShareSubscriptionSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareSubscriptionResponder handles the response to the ListByShareSubscription request. The method always +// closes the http.Response Body. 
+func (client DataSetMappingsClient) ListByShareSubscriptionResponder(resp *http.Response) (result DataSetMappingList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareSubscriptionNextResults retrieves the next set of results, if any. +func (client DataSetMappingsClient) listByShareSubscriptionNextResults(ctx context.Context, lastResults DataSetMappingList) (result DataSetMappingList, err error) { + req, err := lastResults.dataSetMappingListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "listByShareSubscriptionNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSubscriptionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "listByShareSubscriptionNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareSubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetMappingsClient", "listByShareSubscriptionNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareSubscriptionComplete enumerates all values, automatically crossing page boundaries as required. +func (client DataSetMappingsClient) ListByShareSubscriptionComplete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result DataSetMappingListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingsClient.ListByShareSubscription") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShareSubscription(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/datasets.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/datasets.go new file mode 100644 index 000000000000..8bd6ae6a6b99 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/datasets.go @@ -0,0 +1,404 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// DataSetsClient is the creates a Microsoft.DataShare management client. +type DataSetsClient struct { + BaseClient +} + +// NewDataSetsClient creates an instance of the DataSetsClient client. +func NewDataSetsClient(subscriptionID string) DataSetsClient { + return NewDataSetsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewDataSetsClientWithBaseURI creates an instance of the DataSetsClient client using a custom endpoint. Use this +// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewDataSetsClientWithBaseURI(baseURI string, subscriptionID string) DataSetsClient { + return DataSetsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create a DataSet +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share to add the data set to. +// dataSetName - the name of the dataSet. +// dataSet - the new data set information. +func (client DataSetsClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareName string, dataSetName string, dataSet BasicDataSet) (result DataSetModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetsClient.Create") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareName, dataSetName, dataSet) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Create", nil, "Failure preparing request") + return + } + + resp, err := client.CreateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Create", resp, "Failure sending request") + return + } + + result, err = client.CreateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Create", resp, "Failure responding to request") + } + + return +} + +// CreatePreparer prepares the Create request. 
+func (client DataSetsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, dataSetName string, dataSet BasicDataSet) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataSetName": autorest.Encode("path", dataSetName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}", pathParameters), + autorest.WithJSON(dataSet), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetsClient) CreateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client DataSetsClient) CreateResponder(resp *http.Response) (result DataSetModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete a DataSet in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// dataSetName - the name of the dataSet. +func (client DataSetsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareName string, dataSetName string) (result DataSetsDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetsClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareName, dataSetName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. 
+func (client DataSetsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, dataSetName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataSetName": autorest.Encode("path", dataSetName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetsClient) DeleteSender(req *http.Request) (future DataSetsDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client DataSetsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get get a DataSet in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// dataSetName - the name of the dataSet. +func (client DataSetsClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareName string, dataSetName string) (result DataSetModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareName, dataSetName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. 
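Unlike data set mappings, deleting a data set is a long-running operation; an editorial sketch of waiting on the returned DataSetsDeleteFuture (WaitForCompletionRef comes from the embedded azure.Future, placeholder resource names).

// deleteDataSet removes a data set from a share and waits for the operation to finish.
func deleteDataSet(ctx context.Context, client datashare.DataSetsClient) error {
	future, err := client.Delete(ctx, "example-rg", "example-account", "example-share", "example-dataset")
	if err != nil {
		return fmt.Errorf("issuing delete: %+v", err)
	}
	// Block until the long-running operation reaches a terminal state (or ctx is cancelled).
	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return fmt.Errorf("waiting for delete: %+v", err)
	}
	return nil
}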
+func (client DataSetsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, dataSetName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "dataSetName": autorest.Encode("path", dataSetName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets/{dataSetName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client DataSetsClient) GetResponder(resp *http.Response) (result DataSetModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByShare list DataSets in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// skipToken - continuation token +func (client DataSetsClient) ListByShare(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result DataSetListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetsClient.ListByShare") + defer func() { + sc := -1 + if result.dsl.Response.Response != nil { + sc = result.dsl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareNextResults + req, err := client.ListBySharePreparer(ctx, resourceGroupName, accountName, shareName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "ListByShare", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSender(req) + if err != nil { + result.dsl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "ListByShare", resp, "Failure sending request") + return + } + + result.dsl, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "ListByShare", resp, "Failure responding to request") + } + + return +} + +// ListBySharePreparer prepares the ListByShare request. 
+func (client DataSetsClient) ListBySharePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/dataSets", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSender sends the ListByShare request. The method will close the +// http.Response Body if it receives an error. +func (client DataSetsClient) ListByShareSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareResponder handles the response to the ListByShare request. The method always +// closes the http.Response Body. +func (client DataSetsClient) ListByShareResponder(resp *http.Response) (result DataSetList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareNextResults retrieves the next set of results, if any. +func (client DataSetsClient) listByShareNextResults(ctx context.Context, lastResults DataSetList) (result DataSetList, err error) { + req, err := lastResults.dataSetListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.DataSetsClient", "listByShareNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.DataSetsClient", "listByShareNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsClient", "listByShareNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareComplete enumerates all values, automatically crossing page boundaries as required. 
+func (client DataSetsClient) ListByShareComplete(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result DataSetListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetsClient.ListByShare") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShare(ctx, resourceGroupName, accountName, shareName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/invitations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/invitations.go new file mode 100644 index 000000000000..1482860b3e97 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/invitations.go @@ -0,0 +1,404 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// InvitationsClient is the creates a Microsoft.DataShare management client. +type InvitationsClient struct { + BaseClient +} + +// NewInvitationsClient creates an instance of the InvitationsClient client. +func NewInvitationsClient(subscriptionID string) InvitationsClient { + return NewInvitationsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewInvitationsClientWithBaseURI creates an instance of the InvitationsClient client using a custom endpoint. Use +// this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewInvitationsClientWithBaseURI(baseURI string, subscriptionID string) InvitationsClient { + return InvitationsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create an invitation +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share to send the invitation for. +// invitationName - the name of the invitation. +// invitation - invitation details. 
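// Illustrative caller sketch (not part of the generated SDK): one way Create,
// implemented just below, might be invoked. The resource names and subscription id
// are invented, and the TargetEmail property name on InvitationProperties is an
// assumption taken from the 2019-11-01 REST API rather than from this diff.
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
	"github.com/Azure/go-autorest/autorest"
	"github.com/Azure/go-autorest/autorest/to"
)

func sendInvitation(ctx context.Context, authorizer autorest.Authorizer) (datashare.Invitation, error) {
	client := datashare.NewInvitationsClient("00000000-0000-0000-0000-000000000000")
	client.Authorizer = authorizer

	invitation := datashare.Invitation{
		InvitationProperties: &datashare.InvitationProperties{
			// Invite a recipient by e-mail address; targeting by AAD object id is
			// also possible through the other properties on this struct.
			TargetEmail: to.StringPtr("recipient@example.com"),
		},
	}
	return client.Create(ctx, "example-rg", "example-account", "example-share", "example-invitation", invitation)
}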
+func (client InvitationsClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareName string, invitationName string, invitation Invitation) (result Invitation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationsClient.Create") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareName, invitationName, invitation) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Create", nil, "Failure preparing request") + return + } + + resp, err := client.CreateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Create", resp, "Failure sending request") + return + } + + result, err = client.CreateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Create", resp, "Failure responding to request") + } + + return +} + +// CreatePreparer prepares the Create request. +func (client InvitationsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, invitationName string, invitation Invitation) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "invitationName": autorest.Encode("path", invitationName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}", pathParameters), + autorest.WithJSON(invitation), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client InvitationsClient) CreateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client InvitationsClient) CreateResponder(resp *http.Response) (result Invitation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete an invitation in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// invitationName - the name of the invitation. 
+func (client InvitationsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareName string, invitationName string) (result autorest.Response, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationsClient.Delete") + defer func() { + sc := -1 + if result.Response != nil { + sc = result.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareName, invitationName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Delete", nil, "Failure preparing request") + return + } + + resp, err := client.DeleteSender(req) + if err != nil { + result.Response = resp + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Delete", resp, "Failure sending request") + return + } + + result, err = client.DeleteResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Delete", resp, "Failure responding to request") + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client InvitationsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, invitationName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "invitationName": autorest.Encode("path", invitationName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client InvitationsClient) DeleteSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client InvitationsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent), + autorest.ByClosing()) + result.Response = resp + return +} + +// Get get an invitation in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// invitationName - the name of the invitation. 
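// Illustrative caller sketch (not part of the generated SDK): reading an invitation
// back with Get, implemented just below. The nil-guard matters because the embedded
// InvitationProperties pointer (and its read-only InvitationStatus field, assumed
// from the REST API) may be absent in the response; resource names are invented.
package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
)

func printInvitationStatus(ctx context.Context, client datashare.InvitationsClient) error {
	inv, err := client.Get(ctx, "example-rg", "example-account", "example-share", "example-invitation")
	if err != nil {
		return err
	}
	if inv.InvitationProperties != nil {
		// InvitationStatus is one of the InvitationStatus constants declared in models.go.
		fmt.Println("invitation status:", inv.InvitationStatus)
	}
	return nil
}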
+func (client InvitationsClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareName string, invitationName string) (result Invitation, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareName, invitationName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client InvitationsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, invitationName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "invitationName": autorest.Encode("path", invitationName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations/{invitationName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client InvitationsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client InvitationsClient) GetResponder(resp *http.Response) (result Invitation, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByShare list invitations in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. 
+// skipToken - the continuation token +func (client InvitationsClient) ListByShare(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result InvitationListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationsClient.ListByShare") + defer func() { + sc := -1 + if result.il.Response.Response != nil { + sc = result.il.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareNextResults + req, err := client.ListBySharePreparer(ctx, resourceGroupName, accountName, shareName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "ListByShare", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSender(req) + if err != nil { + result.il.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "ListByShare", resp, "Failure sending request") + return + } + + result.il, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "ListByShare", resp, "Failure responding to request") + } + + return +} + +// ListBySharePreparer prepares the ListByShare request. +func (client InvitationsClient) ListBySharePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/invitations", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSender sends the ListByShare request. The method will close the +// http.Response Body if it receives an error. +func (client InvitationsClient) ListByShareSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareResponder handles the response to the ListByShare request. The method always +// closes the http.Response Body. +func (client InvitationsClient) ListByShareResponder(resp *http.Response) (result InvitationList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareNextResults retrieves the next set of results, if any. 
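// Illustrative caller sketch (not part of the generated SDK): walking the invitation
// pages by hand instead of using the ListByShareComplete iterator. It assumes
// InvitationListPage exposes the same NotDone/Values/NextWithContext helpers that
// AccountListPage defines in models.go; the resource names are invented.
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
)

func collectInvitations(ctx context.Context, client datashare.InvitationsClient) ([]datashare.Invitation, error) {
	page, err := client.ListByShare(ctx, "example-rg", "example-account", "example-share", "")
	if err != nil {
		return nil, err
	}
	var all []datashare.Invitation
	for page.NotDone() {
		all = append(all, page.Values()...)
		// NextWithContext issues the request stored in nextLink, if any remains.
		if err := page.NextWithContext(ctx); err != nil {
			return all, err
		}
	}
	return all, nil
}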
+func (client InvitationsClient) listByShareNextResults(ctx context.Context, lastResults InvitationList) (result InvitationList, err error) { + req, err := lastResults.invitationListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.InvitationsClient", "listByShareNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.InvitationsClient", "listByShareNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.InvitationsClient", "listByShareNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareComplete enumerates all values, automatically crossing page boundaries as required. +func (client InvitationsClient) ListByShareComplete(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result InvitationListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationsClient.ListByShare") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShare(ctx, resourceGroupName, accountName, shareName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/models.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/models.go new file mode 100644 index 000000000000..635c9959b48d --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/models.go @@ -0,0 +1,8908 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "encoding/json" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/date" + "github.com/Azure/go-autorest/autorest/to" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// The package's fully qualified name. +const fqdn = "github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare" + +// DataSetMappingStatus enumerates the values for data set mapping status. +type DataSetMappingStatus string + +const ( + // Broken ... + Broken DataSetMappingStatus = "Broken" + // Ok ... 
+ Ok DataSetMappingStatus = "Ok" +) + +// PossibleDataSetMappingStatusValues returns an array of possible values for the DataSetMappingStatus const type. +func PossibleDataSetMappingStatusValues() []DataSetMappingStatus { + return []DataSetMappingStatus{Broken, Ok} +} + +// DataSetType enumerates the values for data set type. +type DataSetType string + +const ( + // AdlsGen1File ... + AdlsGen1File DataSetType = "AdlsGen1File" + // AdlsGen1Folder ... + AdlsGen1Folder DataSetType = "AdlsGen1Folder" + // AdlsGen2File ... + AdlsGen2File DataSetType = "AdlsGen2File" + // AdlsGen2FileSystem ... + AdlsGen2FileSystem DataSetType = "AdlsGen2FileSystem" + // AdlsGen2Folder ... + AdlsGen2Folder DataSetType = "AdlsGen2Folder" + // Blob ... + Blob DataSetType = "Blob" + // BlobFolder ... + BlobFolder DataSetType = "BlobFolder" + // Container ... + Container DataSetType = "Container" + // KustoCluster ... + KustoCluster DataSetType = "KustoCluster" + // KustoDatabase ... + KustoDatabase DataSetType = "KustoDatabase" + // SQLDBTable ... + SQLDBTable DataSetType = "SqlDBTable" + // SQLDWTable ... + SQLDWTable DataSetType = "SqlDWTable" +) + +// PossibleDataSetTypeValues returns an array of possible values for the DataSetType const type. +func PossibleDataSetTypeValues() []DataSetType { + return []DataSetType{AdlsGen1File, AdlsGen1Folder, AdlsGen2File, AdlsGen2FileSystem, AdlsGen2Folder, Blob, BlobFolder, Container, KustoCluster, KustoDatabase, SQLDBTable, SQLDWTable} +} + +// InvitationStatus enumerates the values for invitation status. +type InvitationStatus string + +const ( + // Accepted ... + Accepted InvitationStatus = "Accepted" + // Pending ... + Pending InvitationStatus = "Pending" + // Rejected ... + Rejected InvitationStatus = "Rejected" + // Withdrawn ... + Withdrawn InvitationStatus = "Withdrawn" +) + +// PossibleInvitationStatusValues returns an array of possible values for the InvitationStatus const type. +func PossibleInvitationStatusValues() []InvitationStatus { + return []InvitationStatus{Accepted, Pending, Rejected, Withdrawn} +} + +// Kind enumerates the values for kind. +type Kind string + +const ( + // KindAdlsGen1File ... + KindAdlsGen1File Kind = "AdlsGen1File" + // KindAdlsGen1Folder ... + KindAdlsGen1Folder Kind = "AdlsGen1Folder" + // KindAdlsGen2File ... + KindAdlsGen2File Kind = "AdlsGen2File" + // KindAdlsGen2FileSystem ... + KindAdlsGen2FileSystem Kind = "AdlsGen2FileSystem" + // KindAdlsGen2Folder ... + KindAdlsGen2Folder Kind = "AdlsGen2Folder" + // KindBlob ... + KindBlob Kind = "Blob" + // KindBlobFolder ... + KindBlobFolder Kind = "BlobFolder" + // KindContainer ... + KindContainer Kind = "Container" + // KindDataSet ... + KindDataSet Kind = "DataSet" + // KindKustoCluster ... + KindKustoCluster Kind = "KustoCluster" + // KindKustoDatabase ... + KindKustoDatabase Kind = "KustoDatabase" + // KindSQLDBTable ... + KindSQLDBTable Kind = "SqlDBTable" + // KindSQLDWTable ... + KindSQLDWTable Kind = "SqlDWTable" +) + +// PossibleKindValues returns an array of possible values for the Kind const type. +func PossibleKindValues() []Kind { + return []Kind{KindAdlsGen1File, KindAdlsGen1Folder, KindAdlsGen2File, KindAdlsGen2FileSystem, KindAdlsGen2Folder, KindBlob, KindBlobFolder, KindContainer, KindDataSet, KindKustoCluster, KindKustoDatabase, KindSQLDBTable, KindSQLDWTable} +} + +// KindBasicDataSetMapping enumerates the values for kind basic data set mapping. +type KindBasicDataSetMapping string + +const ( + // KindBasicDataSetMappingKindAdlsGen2File ... 
+ KindBasicDataSetMappingKindAdlsGen2File KindBasicDataSetMapping = "AdlsGen2File" + // KindBasicDataSetMappingKindAdlsGen2FileSystem ... + KindBasicDataSetMappingKindAdlsGen2FileSystem KindBasicDataSetMapping = "AdlsGen2FileSystem" + // KindBasicDataSetMappingKindAdlsGen2Folder ... + KindBasicDataSetMappingKindAdlsGen2Folder KindBasicDataSetMapping = "AdlsGen2Folder" + // KindBasicDataSetMappingKindBlob ... + KindBasicDataSetMappingKindBlob KindBasicDataSetMapping = "Blob" + // KindBasicDataSetMappingKindBlobFolder ... + KindBasicDataSetMappingKindBlobFolder KindBasicDataSetMapping = "BlobFolder" + // KindBasicDataSetMappingKindContainer ... + KindBasicDataSetMappingKindContainer KindBasicDataSetMapping = "Container" + // KindBasicDataSetMappingKindDataSetMapping ... + KindBasicDataSetMappingKindDataSetMapping KindBasicDataSetMapping = "DataSetMapping" + // KindBasicDataSetMappingKindKustoCluster ... + KindBasicDataSetMappingKindKustoCluster KindBasicDataSetMapping = "KustoCluster" + // KindBasicDataSetMappingKindKustoDatabase ... + KindBasicDataSetMappingKindKustoDatabase KindBasicDataSetMapping = "KustoDatabase" + // KindBasicDataSetMappingKindSQLDBTable ... + KindBasicDataSetMappingKindSQLDBTable KindBasicDataSetMapping = "SqlDBTable" + // KindBasicDataSetMappingKindSQLDWTable ... + KindBasicDataSetMappingKindSQLDWTable KindBasicDataSetMapping = "SqlDWTable" +) + +// PossibleKindBasicDataSetMappingValues returns an array of possible values for the KindBasicDataSetMapping const type. +func PossibleKindBasicDataSetMappingValues() []KindBasicDataSetMapping { + return []KindBasicDataSetMapping{KindBasicDataSetMappingKindAdlsGen2File, KindBasicDataSetMappingKindAdlsGen2FileSystem, KindBasicDataSetMappingKindAdlsGen2Folder, KindBasicDataSetMappingKindBlob, KindBasicDataSetMappingKindBlobFolder, KindBasicDataSetMappingKindContainer, KindBasicDataSetMappingKindDataSetMapping, KindBasicDataSetMappingKindKustoCluster, KindBasicDataSetMappingKindKustoDatabase, KindBasicDataSetMappingKindSQLDBTable, KindBasicDataSetMappingKindSQLDWTable} +} + +// KindBasicSourceShareSynchronizationSetting enumerates the values for kind basic source share synchronization +// setting. +type KindBasicSourceShareSynchronizationSetting string + +const ( + // KindScheduleBased ... + KindScheduleBased KindBasicSourceShareSynchronizationSetting = "ScheduleBased" + // KindSourceShareSynchronizationSetting ... + KindSourceShareSynchronizationSetting KindBasicSourceShareSynchronizationSetting = "SourceShareSynchronizationSetting" +) + +// PossibleKindBasicSourceShareSynchronizationSettingValues returns an array of possible values for the KindBasicSourceShareSynchronizationSetting const type. +func PossibleKindBasicSourceShareSynchronizationSettingValues() []KindBasicSourceShareSynchronizationSetting { + return []KindBasicSourceShareSynchronizationSetting{KindScheduleBased, KindSourceShareSynchronizationSetting} +} + +// KindBasicSynchronizationSetting enumerates the values for kind basic synchronization setting. +type KindBasicSynchronizationSetting string + +const ( + // KindBasicSynchronizationSettingKindScheduleBased ... + KindBasicSynchronizationSettingKindScheduleBased KindBasicSynchronizationSetting = "ScheduleBased" + // KindBasicSynchronizationSettingKindSynchronizationSetting ... 
+ KindBasicSynchronizationSettingKindSynchronizationSetting KindBasicSynchronizationSetting = "SynchronizationSetting" +) + +// PossibleKindBasicSynchronizationSettingValues returns an array of possible values for the KindBasicSynchronizationSetting const type. +func PossibleKindBasicSynchronizationSettingValues() []KindBasicSynchronizationSetting { + return []KindBasicSynchronizationSetting{KindBasicSynchronizationSettingKindScheduleBased, KindBasicSynchronizationSettingKindSynchronizationSetting} +} + +// KindBasicTrigger enumerates the values for kind basic trigger. +type KindBasicTrigger string + +const ( + // KindBasicTriggerKindScheduleBased ... + KindBasicTriggerKindScheduleBased KindBasicTrigger = "ScheduleBased" + // KindBasicTriggerKindTrigger ... + KindBasicTriggerKindTrigger KindBasicTrigger = "Trigger" +) + +// PossibleKindBasicTriggerValues returns an array of possible values for the KindBasicTrigger const type. +func PossibleKindBasicTriggerValues() []KindBasicTrigger { + return []KindBasicTrigger{KindBasicTriggerKindScheduleBased, KindBasicTriggerKindTrigger} +} + +// OutputType enumerates the values for output type. +type OutputType string + +const ( + // Csv ... + Csv OutputType = "Csv" + // Parquet ... + Parquet OutputType = "Parquet" +) + +// PossibleOutputTypeValues returns an array of possible values for the OutputType const type. +func PossibleOutputTypeValues() []OutputType { + return []OutputType{Csv, Parquet} +} + +// ProvisioningState enumerates the values for provisioning state. +type ProvisioningState string + +const ( + // Creating ... + Creating ProvisioningState = "Creating" + // Deleting ... + Deleting ProvisioningState = "Deleting" + // Failed ... + Failed ProvisioningState = "Failed" + // Moving ... + Moving ProvisioningState = "Moving" + // Succeeded ... + Succeeded ProvisioningState = "Succeeded" +) + +// PossibleProvisioningStateValues returns an array of possible values for the ProvisioningState const type. +func PossibleProvisioningStateValues() []ProvisioningState { + return []ProvisioningState{Creating, Deleting, Failed, Moving, Succeeded} +} + +// RecurrenceInterval enumerates the values for recurrence interval. +type RecurrenceInterval string + +const ( + // Day ... + Day RecurrenceInterval = "Day" + // Hour ... + Hour RecurrenceInterval = "Hour" +) + +// PossibleRecurrenceIntervalValues returns an array of possible values for the RecurrenceInterval const type. +func PossibleRecurrenceIntervalValues() []RecurrenceInterval { + return []RecurrenceInterval{Day, Hour} +} + +// ShareKind enumerates the values for share kind. +type ShareKind string + +const ( + // CopyBased ... + CopyBased ShareKind = "CopyBased" + // InPlace ... + InPlace ShareKind = "InPlace" +) + +// PossibleShareKindValues returns an array of possible values for the ShareKind const type. +func PossibleShareKindValues() []ShareKind { + return []ShareKind{CopyBased, InPlace} +} + +// ShareSubscriptionStatus enumerates the values for share subscription status. +type ShareSubscriptionStatus string + +const ( + // Active ... + Active ShareSubscriptionStatus = "Active" + // Revoked ... + Revoked ShareSubscriptionStatus = "Revoked" + // Revoking ... + Revoking ShareSubscriptionStatus = "Revoking" + // SourceDeleted ... + SourceDeleted ShareSubscriptionStatus = "SourceDeleted" +) + +// PossibleShareSubscriptionStatusValues returns an array of possible values for the ShareSubscriptionStatus const type. 
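// Illustrative sketch (hypothetical provider-side code, not part of this diff): the
// Possible*Values helpers generated in this file can back schema validation in the
// provider by converting the typed constants to strings. ShareKind is used as the
// example here; the schema field that would consume this helper is invented.
package example

import (
	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func shareKindValidateFunc() schema.SchemaValidateFunc {
	kinds := datashare.PossibleShareKindValues()
	valid := make([]string, 0, len(kinds))
	for _, k := range kinds {
		valid = append(valid, string(k))
	}
	// The second argument false keeps the comparison case-sensitive.
	return validation.StringInSlice(valid, false)
}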
+func PossibleShareSubscriptionStatusValues() []ShareSubscriptionStatus { + return []ShareSubscriptionStatus{Active, Revoked, Revoking, SourceDeleted} +} + +// Status enumerates the values for status. +type Status string + +const ( + // StatusAccepted ... + StatusAccepted Status = "Accepted" + // StatusCanceled ... + StatusCanceled Status = "Canceled" + // StatusFailed ... + StatusFailed Status = "Failed" + // StatusInProgress ... + StatusInProgress Status = "InProgress" + // StatusSucceeded ... + StatusSucceeded Status = "Succeeded" + // StatusTransientFailure ... + StatusTransientFailure Status = "TransientFailure" +) + +// PossibleStatusValues returns an array of possible values for the Status const type. +func PossibleStatusValues() []Status { + return []Status{StatusAccepted, StatusCanceled, StatusFailed, StatusInProgress, StatusSucceeded, StatusTransientFailure} +} + +// SynchronizationMode enumerates the values for synchronization mode. +type SynchronizationMode string + +const ( + // FullSync ... + FullSync SynchronizationMode = "FullSync" + // Incremental ... + Incremental SynchronizationMode = "Incremental" +) + +// PossibleSynchronizationModeValues returns an array of possible values for the SynchronizationMode const type. +func PossibleSynchronizationModeValues() []SynchronizationMode { + return []SynchronizationMode{FullSync, Incremental} +} + +// TriggerStatus enumerates the values for trigger status. +type TriggerStatus string + +const ( + // TriggerStatusActive ... + TriggerStatusActive TriggerStatus = "Active" + // TriggerStatusInactive ... + TriggerStatusInactive TriggerStatus = "Inactive" + // TriggerStatusSourceSynchronizationSettingDeleted ... + TriggerStatusSourceSynchronizationSettingDeleted TriggerStatus = "SourceSynchronizationSettingDeleted" +) + +// PossibleTriggerStatusValues returns an array of possible values for the TriggerStatus const type. +func PossibleTriggerStatusValues() []TriggerStatus { + return []TriggerStatus{TriggerStatusActive, TriggerStatusInactive, TriggerStatusSourceSynchronizationSettingDeleted} +} + +// Type enumerates the values for type. +type Type string + +const ( + // SystemAssigned ... + SystemAssigned Type = "SystemAssigned" +) + +// PossibleTypeValues returns an array of possible values for the Type const type. +func PossibleTypeValues() []Type { + return []Type{SystemAssigned} +} + +// Account an account data transfer object. +type Account struct { + autorest.Response `json:"-"` + // Identity - Identity Info on the Account + Identity *Identity `json:"identity,omitempty"` + // AccountProperties - Properties on the account + *AccountProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Location - Location of the azure resource. + Location *string `json:"location,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Tags - Tags on the azure resource. + Tags map[string]*string `json:"tags"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for Account. 
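// Illustrative caller sketch (not part of the generated SDK): creating an Account
// (the struct defined just above) and waiting on the long-running operation. The
// AccountsClient.Create signature and the Identity field names are assumed from the
// rest of this package rather than shown in this excerpt; resource names are invented.
package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
	"github.com/Azure/go-autorest/autorest/to"
)

func createAccount(ctx context.Context, client datashare.AccountsClient) (datashare.Account, error) {
	account := datashare.Account{
		Location: to.StringPtr("westeurope"),
		Identity: &datashare.Identity{Type: datashare.SystemAssigned},
		Tags:     map[string]*string{"environment": to.StringPtr("example")},
	}
	future, err := client.Create(ctx, "example-rg", "example-account", account)
	if err != nil {
		return datashare.Account{}, err
	}
	// Poll until the LRO finishes, then decode the final Account payload via the
	// AccountsCreateFuture.Result helper defined later in this file.
	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return datashare.Account{}, err
	}
	return future.Result(client)
}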
+func (a Account) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if a.Identity != nil { + objectMap["identity"] = a.Identity + } + if a.AccountProperties != nil { + objectMap["properties"] = a.AccountProperties + } + if a.Location != nil { + objectMap["location"] = a.Location + } + if a.Tags != nil { + objectMap["tags"] = a.Tags + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for Account struct. +func (a *Account) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "identity": + if v != nil { + var identity Identity + err = json.Unmarshal(*v, &identity) + if err != nil { + return err + } + a.Identity = &identity + } + case "properties": + if v != nil { + var accountProperties AccountProperties + err = json.Unmarshal(*v, &accountProperties) + if err != nil { + return err + } + a.AccountProperties = &accountProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + a.ID = &ID + } + case "location": + if v != nil { + var location string + err = json.Unmarshal(*v, &location) + if err != nil { + return err + } + a.Location = &location + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + a.Name = &name + } + case "tags": + if v != nil { + var tags map[string]*string + err = json.Unmarshal(*v, &tags) + if err != nil { + return err + } + a.Tags = tags + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + a.Type = &typeVar + } + } + } + + return nil +} + +// AccountList list response for get Accounts. +type AccountList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]Account `json:"value,omitempty"` +} + +// AccountListIterator provides access to a complete listing of Account values. +type AccountListIterator struct { + i int + page AccountListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *AccountListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *AccountListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter AccountListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. 
+func (iter AccountListIterator) Response() AccountList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter AccountListIterator) Value() Account { + if !iter.page.NotDone() { + return Account{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the AccountListIterator type. +func NewAccountListIterator(page AccountListPage) AccountListIterator { + return AccountListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (al AccountList) IsEmpty() bool { + return al.Value == nil || len(*al.Value) == 0 +} + +// accountListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (al AccountList) accountListPreparer(ctx context.Context) (*http.Request, error) { + if al.NextLink == nil || len(to.String(al.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(al.NextLink))) +} + +// AccountListPage contains a page of Account values. +type AccountListPage struct { + fn func(context.Context, AccountList) (AccountList, error) + al AccountList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *AccountListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/AccountListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.al) + if err != nil { + return err + } + page.al = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *AccountListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page AccountListPage) NotDone() bool { + return !page.al.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page AccountListPage) Response() AccountList { + return page.al +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page AccountListPage) Values() []Account { + if page.al.IsEmpty() { + return nil + } + return *page.al.Value +} + +// Creates a new instance of the AccountListPage type. +func NewAccountListPage(getNextPage func(context.Context, AccountList) (AccountList, error)) AccountListPage { + return AccountListPage{fn: getNextPage} +} + +// AccountProperties account property bag. +type AccountProperties struct { + // CreatedAt - READ-ONLY; Time at which the account was created. + CreatedAt *date.Time `json:"createdAt,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the Account. 
Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // UserEmail - READ-ONLY; Email of the user who created the resource + UserEmail *string `json:"userEmail,omitempty"` + // UserName - READ-ONLY; Name of the user who created the resource + UserName *string `json:"userName,omitempty"` +} + +// AccountsCreateFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type AccountsCreateFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *AccountsCreateFuture) Result(client AccountsClient) (a Account, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsCreateFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.AccountsCreateFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if a.Response.Response, err = future.GetResult(sender); err == nil && a.Response.Response.StatusCode != http.StatusNoContent { + a, err = client.CreateResponder(a.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsCreateFuture", "Result", a.Response.Response, "Failure responding to request") + } + } + return +} + +// AccountsDeleteFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type AccountsDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *AccountsDeleteFuture) Result(client AccountsClient) (or OperationResponse, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.AccountsDeleteFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if or.Response.Response, err = future.GetResult(sender); err == nil && or.Response.Response.StatusCode != http.StatusNoContent { + or, err = client.DeleteResponder(or.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.AccountsDeleteFuture", "Result", or.Response.Response, "Failure responding to request") + } + } + return +} + +// AccountUpdateParameters update parameters for accounts +type AccountUpdateParameters struct { + // Tags - Tags on the azure resource. + Tags map[string]*string `json:"tags"` +} + +// MarshalJSON is the custom marshaler for AccountUpdateParameters. +func (aup AccountUpdateParameters) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if aup.Tags != nil { + objectMap["tags"] = aup.Tags + } + return json.Marshal(objectMap) +} + +// ADLSGen1FileDataSet an ADLS Gen 1 file data set. +type ADLSGen1FileDataSet struct { + // ADLSGen1FileProperties - ADLS Gen 1 file data set properties. 
+ *ADLSGen1FileProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) MarshalJSON() ([]byte, error) { + ag1fds.Kind = KindAdlsGen1File + objectMap := make(map[string]interface{}) + if ag1fds.ADLSGen1FileProperties != nil { + objectMap["properties"] = ag1fds.ADLSGen1FileProperties + } + if ag1fds.Kind != "" { + objectMap["kind"] = ag1fds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return &ag1fds, true +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. 
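// Illustrative caller sketch (not part of the generated SDK): the As* helpers around
// this block are the generated way to recover a concrete type from a BasicDataSet
// value; each returns (nil, false) for every kind other than the actual one. The
// nil-guard on the embedded properties pointer is deliberate.
package example

import (
	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
	"github.com/Azure/go-autorest/autorest/to"
)

func describeDataSet(ds datashare.BasicDataSet) string {
	if file, ok := ds.AsADLSGen1FileDataSet(); ok && file.ADLSGen1FileProperties != nil {
		return "ADLS Gen1 file: " + to.String(file.FileName)
	}
	if folder, ok := ds.AsADLSGen1FolderDataSet(); ok && folder.ADLSGen1FolderProperties != nil {
		return "ADLS Gen1 folder: " + to.String(folder.FolderPath)
	}
	// Other kinds (blob, ADLS Gen2, Kusto, SQL) would be handled the same way.
	return "other data set kind"
}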
+func (ag1fds ADLSGen1FileDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for ADLSGen1FileDataSet. +func (ag1fds ADLSGen1FileDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &ag1fds, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen1FileDataSet struct. +func (ag1fds *ADLSGen1FileDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen1FileProperties ADLSGen1FileProperties + err = json.Unmarshal(*v, &aDLSGen1FileProperties) + if err != nil { + return err + } + ag1fds.ADLSGen1FileProperties = &aDLSGen1FileProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag1fds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag1fds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag1fds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag1fds.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen1FileProperties properties of the ADLS Gen1 file data set. +type ADLSGen1FileProperties struct { + // AccountName - The ADLS account name. + AccountName *string `json:"accountName,omitempty"` + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // FileName - The file name in the ADLS account. + FileName *string `json:"fileName,omitempty"` + // FolderPath - The folder path within the ADLS account. + FolderPath *string `json:"folderPath,omitempty"` + // ResourceGroup - Resource group of ADLS account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // SubscriptionID - Subscription id of ADLS account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen1FolderDataSet an ADLS Gen 1 folder data set. +type ADLSGen1FolderDataSet struct { + // ADLSGen1FolderProperties - ADLS Gen 1 folder data set properties. + *ADLSGen1FolderProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) MarshalJSON() ([]byte, error) { + ag1fds.Kind = KindAdlsGen1Folder + objectMap := make(map[string]interface{}) + if ag1fds.ADLSGen1FolderProperties != nil { + objectMap["properties"] = ag1fds.ADLSGen1FolderProperties + } + if ag1fds.Kind != "" { + objectMap["kind"] = ag1fds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. 
+func (ag1fds ADLSGen1FolderDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return &ag1fds, true +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for ADLSGen1FolderDataSet. +func (ag1fds ADLSGen1FolderDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &ag1fds, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen1FolderDataSet struct. 
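// Illustrative caller sketch (not part of the generated SDK): the custom MarshalJSON
// defined earlier in this file for ADLSGen1FolderDataSet stamps the kind
// discriminator, so callers never need to set Kind themselves. The account name and
// folder path below are invented.
package example

import (
	"encoding/json"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
	"github.com/Azure/go-autorest/autorest/to"
)

func marshalFolderDataSet() ([]byte, error) {
	ds := datashare.ADLSGen1FolderDataSet{
		ADLSGen1FolderProperties: &datashare.ADLSGen1FolderProperties{
			AccountName: to.StringPtr("exampleadlsaccount"),
			FolderPath:  to.StringPtr("shared/folder"),
		},
	}
	// The resulting JSON contains "kind":"AdlsGen1Folder" alongside "properties".
	return json.Marshal(ds)
}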
+func (ag1fds *ADLSGen1FolderDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen1FolderProperties ADLSGen1FolderProperties + err = json.Unmarshal(*v, &aDLSGen1FolderProperties) + if err != nil { + return err + } + ag1fds.ADLSGen1FolderProperties = &aDLSGen1FolderProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag1fds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag1fds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag1fds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag1fds.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen1FolderProperties properties of the ADLS Gen1 folder data set. +type ADLSGen1FolderProperties struct { + // AccountName - The ADLS account name. + AccountName *string `json:"accountName,omitempty"` + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // FolderPath - The folder path within the ADLS account. + FolderPath *string `json:"folderPath,omitempty"` + // ResourceGroup - Resource group of ADLS account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // SubscriptionID - Subscription id of ADLS account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen2FileDataSet an ADLS Gen 2 file data set. +type ADLSGen2FileDataSet struct { + // ADLSGen2FileProperties - ADLS Gen 2 file data set properties. + *ADLSGen2FileProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) MarshalJSON() ([]byte, error) { + ag2fds.Kind = KindAdlsGen2File + objectMap := make(map[string]interface{}) + if ag2fds.ADLSGen2FileProperties != nil { + objectMap["properties"] = ag2fds.ADLSGen2FileProperties + } + if ag2fds.Kind != "" { + objectMap["kind"] = ag2fds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. 
+func (ag2fds ADLSGen2FileDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return &ag2fds, true +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for ADLSGen2FileDataSet. +func (ag2fds ADLSGen2FileDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &ag2fds, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen2FileDataSet struct. +func (ag2fds *ADLSGen2FileDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen2FileProperties ADLSGen2FileProperties + err = json.Unmarshal(*v, &aDLSGen2FileProperties) + if err != nil { + return err + } + ag2fds.ADLSGen2FileProperties = &aDLSGen2FileProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag2fds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag2fds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag2fds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag2fds.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen2FileDataSetMapping an ADLS Gen2 file data set mapping. 
+type ADLSGen2FileDataSetMapping struct { + // ADLSGen2FileDataSetMappingProperties - ADLS Gen2 file data set mapping properties. + *ADLSGen2FileDataSetMappingProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) MarshalJSON() ([]byte, error) { + ag2fdsm.Kind = KindBasicDataSetMappingKindAdlsGen2File + objectMap := make(map[string]interface{}) + if ag2fdsm.ADLSGen2FileDataSetMappingProperties != nil { + objectMap["properties"] = ag2fdsm.ADLSGen2FileDataSetMappingProperties + } + if ag2fdsm.Kind != "" { + objectMap["kind"] = ag2fdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return &ag2fdsm, true +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. 
+func (ag2fdsm ADLSGen2FileDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileDataSetMapping. +func (ag2fdsm ADLSGen2FileDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &ag2fdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen2FileDataSetMapping struct. +func (ag2fdsm *ADLSGen2FileDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen2FileDataSetMappingProperties ADLSGen2FileDataSetMappingProperties + err = json.Unmarshal(*v, &aDLSGen2FileDataSetMappingProperties) + if err != nil { + return err + } + ag2fdsm.ADLSGen2FileDataSetMappingProperties = &aDLSGen2FileDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag2fdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag2fdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag2fdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag2fdsm.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen2FileDataSetMappingProperties ADLS Gen 2 file data set mapping property bag. +type ADLSGen2FileDataSetMappingProperties struct { + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // FilePath - File path within the file system. + FilePath *string `json:"filePath,omitempty"` + // FileSystem - File system to which the file belongs. + FileSystem *string `json:"fileSystem,omitempty"` + // OutputType - Type of output file. Possible values include: 'Csv', 'Parquet' + OutputType OutputType `json:"outputType,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ResourceGroup - Resource group of storage account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set. 
+ StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen2FileProperties properties of the ADLS Gen2 file data set. +type ADLSGen2FileProperties struct { + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // FilePath - File path within the file system. + FilePath *string `json:"filePath,omitempty"` + // FileSystem - File system to which the file belongs. + FileSystem *string `json:"fileSystem,omitempty"` + // ResourceGroup - Resource group of storage account + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen2FileSystemDataSet an ADLS Gen 2 file system data set. +type ADLSGen2FileSystemDataSet struct { + // ADLSGen2FileSystemProperties - ADLS Gen 2 file system data set properties. + *ADLSGen2FileSystemProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) MarshalJSON() ([]byte, error) { + ag2fsds.Kind = KindAdlsGen2FileSystem + objectMap := make(map[string]interface{}) + if ag2fsds.ADLSGen2FileSystemProperties != nil { + objectMap["properties"] = ag2fsds.ADLSGen2FileSystemProperties + } + if ag2fsds.Kind != "" { + objectMap["kind"] = ag2fsds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. 
+func (ag2fsds ADLSGen2FileSystemDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return &ag2fsds, true +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for ADLSGen2FileSystemDataSet. +func (ag2fsds ADLSGen2FileSystemDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &ag2fsds, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen2FileSystemDataSet struct. +func (ag2fsds *ADLSGen2FileSystemDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen2FileSystemProperties ADLSGen2FileSystemProperties + err = json.Unmarshal(*v, &aDLSGen2FileSystemProperties) + if err != nil { + return err + } + ag2fsds.ADLSGen2FileSystemProperties = &aDLSGen2FileSystemProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag2fsds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag2fsds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag2fsds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag2fsds.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen2FileSystemDataSetMapping an ADLS Gen2 file system data set mapping. +type ADLSGen2FileSystemDataSetMapping struct { + // ADLSGen2FileSystemDataSetMappingProperties - ADLS Gen2 file system data set mapping properties. 
+ *ADLSGen2FileSystemDataSetMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) MarshalJSON() ([]byte, error) { + ag2fsdsm.Kind = KindBasicDataSetMappingKindAdlsGen2FileSystem + objectMap := make(map[string]interface{}) + if ag2fsdsm.ADLSGen2FileSystemDataSetMappingProperties != nil { + objectMap["properties"] = ag2fsdsm.ADLSGen2FileSystemDataSetMappingProperties + } + if ag2fsdsm.Kind != "" { + objectMap["kind"] = ag2fsdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return &ag2fsdsm, true +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. 
+func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FileSystemDataSetMapping. +func (ag2fsdsm ADLSGen2FileSystemDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &ag2fsdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen2FileSystemDataSetMapping struct. +func (ag2fsdsm *ADLSGen2FileSystemDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen2FileSystemDataSetMappingProperties ADLSGen2FileSystemDataSetMappingProperties + err = json.Unmarshal(*v, &aDLSGen2FileSystemDataSetMappingProperties) + if err != nil { + return err + } + ag2fsdsm.ADLSGen2FileSystemDataSetMappingProperties = &aDLSGen2FileSystemDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag2fsdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag2fsdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag2fsdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag2fsdsm.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen2FileSystemDataSetMappingProperties ADLS Gen 2 file system data set mapping property bag. +type ADLSGen2FileSystemDataSetMappingProperties struct { + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // FileSystem - The file system name. + FileSystem *string `json:"fileSystem,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ResourceGroup - Resource group of storage account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set. + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account. 
+ SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen2FileSystemProperties properties of the ADLS Gen2 file system data set. +type ADLSGen2FileSystemProperties struct { + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // FileSystem - The file system name. + FileSystem *string `json:"fileSystem,omitempty"` + // ResourceGroup - Resource group of storage account + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen2FolderDataSet an ADLS Gen 2 folder data set. +type ADLSGen2FolderDataSet struct { + // ADLSGen2FolderProperties - ADLS Gen 2 folder data set properties. + *ADLSGen2FolderProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) MarshalJSON() ([]byte, error) { + ag2fds.Kind = KindAdlsGen2Folder + objectMap := make(map[string]interface{}) + if ag2fds.ADLSGen2FolderProperties != nil { + objectMap["properties"] = ag2fds.ADLSGen2FolderProperties + } + if ag2fds.Kind != "" { + objectMap["kind"] = ag2fds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return &ag2fds, true +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. 
+func (ag2fds ADLSGen2FolderDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for ADLSGen2FolderDataSet. +func (ag2fds ADLSGen2FolderDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &ag2fds, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen2FolderDataSet struct. +func (ag2fds *ADLSGen2FolderDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen2FolderProperties ADLSGen2FolderProperties + err = json.Unmarshal(*v, &aDLSGen2FolderProperties) + if err != nil { + return err + } + ag2fds.ADLSGen2FolderProperties = &aDLSGen2FolderProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag2fds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag2fds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag2fds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag2fds.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen2FolderDataSetMapping an ADLS Gen2 folder data set mapping. +type ADLSGen2FolderDataSetMapping struct { + // ADLSGen2FolderDataSetMappingProperties - ADLS Gen2 folder data set mapping properties. 
+ *ADLSGen2FolderDataSetMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) MarshalJSON() ([]byte, error) { + ag2fdsm.Kind = KindBasicDataSetMappingKindAdlsGen2Folder + objectMap := make(map[string]interface{}) + if ag2fdsm.ADLSGen2FolderDataSetMappingProperties != nil { + objectMap["properties"] = ag2fdsm.ADLSGen2FolderDataSetMappingProperties + } + if ag2fdsm.Kind != "" { + objectMap["kind"] = ag2fdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return &ag2fdsm, true +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. 
+func (ag2fdsm ADLSGen2FolderDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for ADLSGen2FolderDataSetMapping. +func (ag2fdsm ADLSGen2FolderDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &ag2fdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for ADLSGen2FolderDataSetMapping struct. +func (ag2fdsm *ADLSGen2FolderDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var aDLSGen2FolderDataSetMappingProperties ADLSGen2FolderDataSetMappingProperties + err = json.Unmarshal(*v, &aDLSGen2FolderDataSetMappingProperties) + if err != nil { + return err + } + ag2fdsm.ADLSGen2FolderDataSetMappingProperties = &aDLSGen2FolderDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ag2fdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ag2fdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ag2fdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ag2fdsm.Type = &typeVar + } + } + } + + return nil +} + +// ADLSGen2FolderDataSetMappingProperties ADLS Gen 2 folder data set mapping property bag. +type ADLSGen2FolderDataSetMappingProperties struct { + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // FileSystem - File system to which the folder belongs. + FileSystem *string `json:"fileSystem,omitempty"` + // FolderPath - Folder path within the file system. + FolderPath *string `json:"folderPath,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ResourceGroup - Resource group of storage account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set. + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ADLSGen2FolderProperties properties of the ADLS Gen2 folder data set. 
+type ADLSGen2FolderProperties struct { + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // FileSystem - File system to which the folder belongs. + FileSystem *string `json:"fileSystem,omitempty"` + // FolderPath - Folder path within the file system. + FolderPath *string `json:"folderPath,omitempty"` + // ResourceGroup - Resource group of storage account + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// BlobContainerDataSet an Azure storage blob container data set. +type BlobContainerDataSet struct { + // BlobContainerProperties - Blob container data set properties. + *BlobContainerProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobContainerDataSet. +func (bcds BlobContainerDataSet) MarshalJSON() ([]byte, error) { + bcds.Kind = KindContainer + objectMap := make(map[string]interface{}) + if bcds.BlobContainerProperties != nil { + objectMap["properties"] = bcds.BlobContainerProperties + } + if bcds.Kind != "" { + objectMap["kind"] = bcds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return &bcds, true +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for BlobContainerDataSet. 
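Reviewer note: a hedged sketch of how the As* conversion helpers are meant to be consumed. It assumes the BasicDataSet interface declared elsewhere in this generated file lists these As* methods (as the comments above state); the function name and return strings are hypothetical.

    // describeDataSet is a hypothetical helper showing how calling code narrows a
    // polymorphic BasicDataSet to a concrete type without a type switch: only the
    // matching concrete type returns ok == true from its As* method.
    func describeDataSet(ds datashare.BasicDataSet) string {
        if container, ok := ds.AsBlobContainerDataSet(); ok && container.BlobContainerProperties != nil {
            if container.ContainerName != nil {
                return "blob container: " + *container.ContainerName
            }
            return "blob container"
        }
        if file, ok := ds.AsADLSGen2FileDataSet(); ok && file.ADLSGen2FileProperties != nil && file.FilePath != nil {
            return "ADLS Gen2 file: " + *file.FilePath
        }
        return "other data set kind"
    }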
+func (bcds BlobContainerDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for BlobContainerDataSet. +func (bcds BlobContainerDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &bcds, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobContainerDataSet struct. +func (bcds *BlobContainerDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobContainerProperties BlobContainerProperties + err = json.Unmarshal(*v, &blobContainerProperties) + if err != nil { + return err + } + bcds.BlobContainerProperties = &blobContainerProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + bcds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + bcds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + bcds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bcds.Type = &typeVar + } + } + } + + return nil +} + +// BlobContainerDataSetMapping a Blob container data set mapping. +type BlobContainerDataSetMapping struct { + // BlobContainerMappingProperties - Blob container data set mapping properties. + *BlobContainerMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobContainerDataSetMapping. 
+func (bcdsm BlobContainerDataSetMapping) MarshalJSON() ([]byte, error) { + bcdsm.Kind = KindBasicDataSetMappingKindContainer + objectMap := make(map[string]interface{}) + if bcdsm.BlobContainerMappingProperties != nil { + objectMap["properties"] = bcdsm.BlobContainerMappingProperties + } + if bcdsm.Kind != "" { + objectMap["kind"] = bcdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return &bcdsm, true +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for BlobContainerDataSetMapping. +func (bcdsm BlobContainerDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &bcdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobContainerDataSetMapping struct. 
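Reviewer note: another illustrative sketch (helper name and values hypothetical) of building a BlobContainerDataSetMapping payload from the types above. DataSetMappingStatus and ProvisioningState are documented as READ-ONLY, so they are left for the service to populate; Kind can be left unset because MarshalJSON stamps KindBasicDataSetMappingKindContainer at serialization time.

    // newBlobContainerMapping is a hypothetical constructor for a Blob container
    // data set mapping, populating only the writable property-bag fields.
    func newBlobContainerMapping(dataSetID, containerName, resourceGroup, accountName, subscriptionID string) datashare.BlobContainerDataSetMapping {
        return datashare.BlobContainerDataSetMapping{
            BlobContainerMappingProperties: &datashare.BlobContainerMappingProperties{
                ContainerName:      &containerName,
                DataSetID:          &dataSetID,
                ResourceGroup:      &resourceGroup,
                StorageAccountName: &accountName,
                SubscriptionID:     &subscriptionID,
            },
            // Kind is intentionally omitted: the custom MarshalJSON above sets it
            // to KindBasicDataSetMappingKindContainer when the value is encoded.
        }
    }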
+func (bcdsm *BlobContainerDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobContainerMappingProperties BlobContainerMappingProperties + err = json.Unmarshal(*v, &blobContainerMappingProperties) + if err != nil { + return err + } + bcdsm.BlobContainerMappingProperties = &blobContainerMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + bcdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + bcdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + bcdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bcdsm.Type = &typeVar + } + } + } + + return nil +} + +// BlobContainerMappingProperties azure storage Blob container data set mapping property bag. +type BlobContainerMappingProperties struct { + // ContainerName - BLOB Container name. + ContainerName *string `json:"containerName,omitempty"` + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ResourceGroup - Resource group of storage account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set. + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// BlobContainerProperties properties of the BLOB container data set. +type BlobContainerProperties struct { + // ContainerName - BLOB Container name. + ContainerName *string `json:"containerName,omitempty"` + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // ResourceGroup - Resource group of storage account + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// BlobDataSet an Azure storage blob data set. +type BlobDataSet struct { + // BlobProperties - Blob data set properties. 
+ *BlobProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobDataSet. +func (bds BlobDataSet) MarshalJSON() ([]byte, error) { + bds.Kind = KindBlob + objectMap := make(map[string]interface{}) + if bds.BlobProperties != nil { + objectMap["properties"] = bds.BlobProperties + } + if bds.Kind != "" { + objectMap["kind"] = bds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return &bds, true +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for BlobDataSet. +func (bds BlobDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for BlobDataSet. 
+func (bds BlobDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &bds, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobDataSet struct. +func (bds *BlobDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobProperties BlobProperties + err = json.Unmarshal(*v, &blobProperties) + if err != nil { + return err + } + bds.BlobProperties = &blobProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + bds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + bds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + bds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bds.Type = &typeVar + } + } + } + + return nil +} + +// BlobDataSetMapping a Blob data set mapping. +type BlobDataSetMapping struct { + // BlobMappingProperties - Blob data set mapping properties. + *BlobMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) MarshalJSON() ([]byte, error) { + bdsm.Kind = KindBasicDataSetMappingKindBlob + objectMap := make(map[string]interface{}) + if bdsm.BlobMappingProperties != nil { + objectMap["properties"] = bdsm.BlobMappingProperties + } + if bdsm.Kind != "" { + objectMap["kind"] = bdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return &bdsm, true +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. 
+func (bdsm BlobDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for BlobDataSetMapping. +func (bdsm BlobDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &bdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobDataSetMapping struct. +func (bdsm *BlobDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobMappingProperties BlobMappingProperties + err = json.Unmarshal(*v, &blobMappingProperties) + if err != nil { + return err + } + bdsm.BlobMappingProperties = &blobMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + bdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + bdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + bdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bdsm.Type = &typeVar + } + } + } + + return nil +} + +// BlobFolderDataSet an Azure storage blob folder data set. +type BlobFolderDataSet struct { + // BlobFolderProperties - Blob folder data set properties. 
+ *BlobFolderProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobFolderDataSet. +func (bfds BlobFolderDataSet) MarshalJSON() ([]byte, error) { + bfds.Kind = KindBlobFolder + objectMap := make(map[string]interface{}) + if bfds.BlobFolderProperties != nil { + objectMap["properties"] = bfds.BlobFolderProperties + } + if bfds.Kind != "" { + objectMap["kind"] = bfds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return &bfds, true +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for BlobFolderDataSet. 
+func (bfds BlobFolderDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for BlobFolderDataSet. +func (bfds BlobFolderDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &bfds, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobFolderDataSet struct. +func (bfds *BlobFolderDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobFolderProperties BlobFolderProperties + err = json.Unmarshal(*v, &blobFolderProperties) + if err != nil { + return err + } + bfds.BlobFolderProperties = &blobFolderProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + bfds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + bfds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + bfds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bfds.Type = &typeVar + } + } + } + + return nil +} + +// BlobFolderDataSetMapping a Blob folder data set mapping. +type BlobFolderDataSetMapping struct { + // BlobFolderMappingProperties - Blob folder data set mapping properties. + *BlobFolderMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) MarshalJSON() ([]byte, error) { + bfdsm.Kind = KindBasicDataSetMappingKindBlobFolder + objectMap := make(map[string]interface{}) + if bfdsm.BlobFolderMappingProperties != nil { + objectMap["properties"] = bfdsm.BlobFolderMappingProperties + } + if bfdsm.Kind != "" { + objectMap["kind"] = bfdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return &bfdsm, true +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. 
+func (bfdsm BlobFolderDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for BlobFolderDataSetMapping. +func (bfdsm BlobFolderDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &bfdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for BlobFolderDataSetMapping struct. +func (bfdsm *BlobFolderDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var blobFolderMappingProperties BlobFolderMappingProperties + err = json.Unmarshal(*v, &blobFolderMappingProperties) + if err != nil { + return err + } + bfdsm.BlobFolderMappingProperties = &blobFolderMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + bfdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + bfdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + bfdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + bfdsm.Type = &typeVar + } + } + } + + return nil +} + +// BlobFolderMappingProperties azure storage Blob folder data set mapping property bag. 
+type BlobFolderMappingProperties struct { + // ContainerName - Container that has the file path. + ContainerName *string `json:"containerName,omitempty"` + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // Prefix - Prefix for blob folder + Prefix *string `json:"prefix,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ResourceGroup - Resource group of storage account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set. + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// BlobFolderProperties properties of the blob folder data set. +type BlobFolderProperties struct { + // ContainerName - Container that has the file path. + ContainerName *string `json:"containerName,omitempty"` + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // Prefix - Prefix for blob folder + Prefix *string `json:"prefix,omitempty"` + // ResourceGroup - Resource group of storage account + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// BlobMappingProperties azure storage Blob data set mapping property bag. +type BlobMappingProperties struct { + // ContainerName - Container that has the file path. + ContainerName *string `json:"containerName,omitempty"` + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // FilePath - File path within the source data set + FilePath *string `json:"filePath,omitempty"` + // OutputType - File output type. Possible values include: 'Csv', 'Parquet' + OutputType OutputType `json:"outputType,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ResourceGroup - Resource group of storage account. + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set. + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account. + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// BlobProperties properties of the blob data set. +type BlobProperties struct { + // ContainerName - Container that has the file path. 
+ ContainerName *string `json:"containerName,omitempty"` + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // FilePath - File path within the source data set + FilePath *string `json:"filePath,omitempty"` + // ResourceGroup - Resource group of storage account + ResourceGroup *string `json:"resourceGroup,omitempty"` + // StorageAccountName - Storage account name of the source data set + StorageAccountName *string `json:"storageAccountName,omitempty"` + // SubscriptionID - Subscription id of storage account + SubscriptionID *string `json:"subscriptionId,omitempty"` +} + +// ConsumerInvitation a consumer Invitation data transfer object. +type ConsumerInvitation struct { + autorest.Response `json:"-"` + // ConsumerInvitationProperties - Properties on the account + *ConsumerInvitationProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ConsumerInvitation. +func (ci ConsumerInvitation) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ci.ConsumerInvitationProperties != nil { + objectMap["properties"] = ci.ConsumerInvitationProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for ConsumerInvitation struct. +func (ci *ConsumerInvitation) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var consumerInvitationProperties ConsumerInvitationProperties + err = json.Unmarshal(*v, &consumerInvitationProperties) + if err != nil { + return err + } + ci.ConsumerInvitationProperties = &consumerInvitationProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ci.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ci.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ci.Type = &typeVar + } + } + } + + return nil +} + +// ConsumerInvitationList list response for get InvitationList +type ConsumerInvitationList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]ConsumerInvitation `json:"value,omitempty"` +} + +// ConsumerInvitationListIterator provides access to a complete listing of ConsumerInvitation values. +type ConsumerInvitationListIterator struct { + i int + page ConsumerInvitationListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
+func (iter *ConsumerInvitationListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerInvitationListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ConsumerInvitationListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ConsumerInvitationListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ConsumerInvitationListIterator) Response() ConsumerInvitationList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter ConsumerInvitationListIterator) Value() ConsumerInvitation { + if !iter.page.NotDone() { + return ConsumerInvitation{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ConsumerInvitationListIterator type. +func NewConsumerInvitationListIterator(page ConsumerInvitationListPage) ConsumerInvitationListIterator { + return ConsumerInvitationListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (cil ConsumerInvitationList) IsEmpty() bool { + return cil.Value == nil || len(*cil.Value) == 0 +} + +// consumerInvitationListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (cil ConsumerInvitationList) consumerInvitationListPreparer(ctx context.Context) (*http.Request, error) { + if cil.NextLink == nil || len(to.String(cil.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(cil.NextLink))) +} + +// ConsumerInvitationListPage contains a page of ConsumerInvitation values. +type ConsumerInvitationListPage struct { + fn func(context.Context, ConsumerInvitationList) (ConsumerInvitationList, error) + cil ConsumerInvitationList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ConsumerInvitationListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerInvitationListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.cil) + if err != nil { + return err + } + page.cil = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. 
+func (page *ConsumerInvitationListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ConsumerInvitationListPage) NotDone() bool { + return !page.cil.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ConsumerInvitationListPage) Response() ConsumerInvitationList { + return page.cil +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ConsumerInvitationListPage) Values() []ConsumerInvitation { + if page.cil.IsEmpty() { + return nil + } + return *page.cil.Value +} + +// Creates a new instance of the ConsumerInvitationListPage type. +func NewConsumerInvitationListPage(getNextPage func(context.Context, ConsumerInvitationList) (ConsumerInvitationList, error)) ConsumerInvitationListPage { + return ConsumerInvitationListPage{fn: getNextPage} +} + +// ConsumerInvitationProperties properties of consumer invitation +type ConsumerInvitationProperties struct { + // DataSetCount - READ-ONLY; Number of data sets in a share + DataSetCount *int32 `json:"dataSetCount,omitempty"` + // Description - READ-ONLY; Description shared when the invitation was created + Description *string `json:"description,omitempty"` + // InvitationID - Unique id of the invitation. + InvitationID *string `json:"invitationId,omitempty"` + // InvitationStatus - READ-ONLY; The status of the invitation. Possible values include: 'Pending', 'Accepted', 'Rejected', 'Withdrawn' + InvitationStatus InvitationStatus `json:"invitationStatus,omitempty"` + // Location - READ-ONLY; invitation location + Location *string `json:"location,omitempty"` + // ProviderEmail - READ-ONLY; Email of the provider who created the resource + ProviderEmail *string `json:"providerEmail,omitempty"` + // ProviderName - READ-ONLY; Name of the provider who created the resource + ProviderName *string `json:"providerName,omitempty"` + // ProviderTenantName - READ-ONLY; Tenant name of the provider who created the resource + ProviderTenantName *string `json:"providerTenantName,omitempty"` + // RespondedAt - READ-ONLY; The time the recipient responded to the invitation. + RespondedAt *date.Time `json:"respondedAt,omitempty"` + // SentAt - READ-ONLY; Gets the time at which the invitation was sent. + SentAt *date.Time `json:"sentAt,omitempty"` + // ShareName - READ-ONLY; Gets the source share Name. + ShareName *string `json:"shareName,omitempty"` + // TermsOfUse - READ-ONLY; Terms of use shared when the invitation was created + TermsOfUse *string `json:"termsOfUse,omitempty"` + // UserEmail - READ-ONLY; Email of the user who created the resource + UserEmail *string `json:"userEmail,omitempty"` + // UserName - READ-ONLY; Name of the user who created the resource + UserName *string `json:"userName,omitempty"` +} + +// ConsumerSourceDataSet a consumer side dataSet data transfer object. +type ConsumerSourceDataSet struct { + // ConsumerSourceDataSetProperties - source dataSet properties + *ConsumerSourceDataSetProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ConsumerSourceDataSet. 
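[Editor's note] The ConsumerInvitationListPage/Iterator pair above follows the generated Track 1 pagination contract: NotDone reports whether values remain, Values exposes the current page, and NextWithContext fetches the next page via the preparer. A minimal consumption sketch, assuming the page value was returned pre-loaded by one of the generated ConsumerInvitations list operations (not part of this hunk); printInvitations is a hypothetical helper name:

package example

import (
	"context"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
)

// printInvitations walks every page of a ConsumerInvitationList result and
// prints each invitation name. The first page is assumed to be loaded already.
func printInvitations(ctx context.Context, page datashare.ConsumerInvitationListPage) error {
	for page.NotDone() {
		for _, inv := range page.Values() {
			if inv.Name != nil {
				fmt.Println(*inv.Name)
			}
		}
		// Fetch the next page; on error the page does not advance.
		if err := page.NextWithContext(ctx); err != nil {
			return err
		}
	}
	return nil
}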
+func (csds ConsumerSourceDataSet) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if csds.ConsumerSourceDataSetProperties != nil { + objectMap["properties"] = csds.ConsumerSourceDataSetProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for ConsumerSourceDataSet struct. +func (csds *ConsumerSourceDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var consumerSourceDataSetProperties ConsumerSourceDataSetProperties + err = json.Unmarshal(*v, &consumerSourceDataSetProperties) + if err != nil { + return err + } + csds.ConsumerSourceDataSetProperties = &consumerSourceDataSetProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + csds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + csds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + csds.Type = &typeVar + } + } + } + + return nil +} + +// ConsumerSourceDataSetList a consumer side list of source dataSets +type ConsumerSourceDataSetList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]ConsumerSourceDataSet `json:"value,omitempty"` +} + +// ConsumerSourceDataSetListIterator provides access to a complete listing of ConsumerSourceDataSet values. +type ConsumerSourceDataSetListIterator struct { + i int + page ConsumerSourceDataSetListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *ConsumerSourceDataSetListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerSourceDataSetListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ConsumerSourceDataSetListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ConsumerSourceDataSetListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ConsumerSourceDataSetListIterator) Response() ConsumerSourceDataSetList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. 
+func (iter ConsumerSourceDataSetListIterator) Value() ConsumerSourceDataSet { + if !iter.page.NotDone() { + return ConsumerSourceDataSet{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ConsumerSourceDataSetListIterator type. +func NewConsumerSourceDataSetListIterator(page ConsumerSourceDataSetListPage) ConsumerSourceDataSetListIterator { + return ConsumerSourceDataSetListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (csdsl ConsumerSourceDataSetList) IsEmpty() bool { + return csdsl.Value == nil || len(*csdsl.Value) == 0 +} + +// consumerSourceDataSetListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (csdsl ConsumerSourceDataSetList) consumerSourceDataSetListPreparer(ctx context.Context) (*http.Request, error) { + if csdsl.NextLink == nil || len(to.String(csdsl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(csdsl.NextLink))) +} + +// ConsumerSourceDataSetListPage contains a page of ConsumerSourceDataSet values. +type ConsumerSourceDataSetListPage struct { + fn func(context.Context, ConsumerSourceDataSetList) (ConsumerSourceDataSetList, error) + csdsl ConsumerSourceDataSetList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ConsumerSourceDataSetListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ConsumerSourceDataSetListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.csdsl) + if err != nil { + return err + } + page.csdsl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *ConsumerSourceDataSetListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ConsumerSourceDataSetListPage) NotDone() bool { + return !page.csdsl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ConsumerSourceDataSetListPage) Response() ConsumerSourceDataSetList { + return page.csdsl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ConsumerSourceDataSetListPage) Values() []ConsumerSourceDataSet { + if page.csdsl.IsEmpty() { + return nil + } + return *page.csdsl.Value +} + +// Creates a new instance of the ConsumerSourceDataSetListPage type. +func NewConsumerSourceDataSetListPage(getNextPage func(context.Context, ConsumerSourceDataSetList) (ConsumerSourceDataSetList, error)) ConsumerSourceDataSetListPage { + return ConsumerSourceDataSetListPage{fn: getNextPage} +} + +// ConsumerSourceDataSetProperties properties of consumer source dataSet +type ConsumerSourceDataSetProperties struct { + // DataSetID - READ-ONLY; DataSet Id + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetLocation - READ-ONLY; Location of the data set. 
+ DataSetLocation *string `json:"dataSetLocation,omitempty"` + // DataSetName - READ-ONLY; DataSet name + DataSetName *string `json:"dataSetName,omitempty"` + // DataSetPath - READ-ONLY; DataSet path + DataSetPath *string `json:"dataSetPath,omitempty"` + // DataSetType - READ-ONLY; Type of data set. Possible values include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SQLDBTable', 'SQLDWTable' + DataSetType DataSetType `json:"dataSetType,omitempty"` +} + +// BasicDataSet a DataSet data transfer object. +type BasicDataSet interface { + AsBlobDataSet() (*BlobDataSet, bool) + AsBlobFolderDataSet() (*BlobFolderDataSet, bool) + AsBlobContainerDataSet() (*BlobContainerDataSet, bool) + AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) + AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) + AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) + AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) + AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) + AsKustoClusterDataSet() (*KustoClusterDataSet, bool) + AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) + AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) + AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) + AsDataSet() (*DataSet, bool) +} + +// DataSet a DataSet data transfer object. +type DataSet struct { + autorest.Response `json:"-"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +func unmarshalBasicDataSet(body []byte) (BasicDataSet, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["kind"] { + case string(KindBlob): + var bds BlobDataSet + err := json.Unmarshal(body, &bds) + return bds, err + case string(KindBlobFolder): + var bfds BlobFolderDataSet + err := json.Unmarshal(body, &bfds) + return bfds, err + case string(KindContainer): + var bcds BlobContainerDataSet + err := json.Unmarshal(body, &bcds) + return bcds, err + case string(KindAdlsGen2File): + var ag2fds ADLSGen2FileDataSet + err := json.Unmarshal(body, &ag2fds) + return ag2fds, err + case string(KindAdlsGen2Folder): + var ag2fds ADLSGen2FolderDataSet + err := json.Unmarshal(body, &ag2fds) + return ag2fds, err + case string(KindAdlsGen2FileSystem): + var ag2fsds ADLSGen2FileSystemDataSet + err := json.Unmarshal(body, &ag2fsds) + return ag2fsds, err + case string(KindAdlsGen1Folder): + var ag1fds ADLSGen1FolderDataSet + err := json.Unmarshal(body, &ag1fds) + return ag1fds, err + case string(KindAdlsGen1File): + var ag1fds ADLSGen1FileDataSet + err := json.Unmarshal(body, &ag1fds) + return ag1fds, err + case string(KindKustoCluster): + var kcds KustoClusterDataSet + err := json.Unmarshal(body, &kcds) + return kcds, err + case string(KindKustoDatabase): + var kdds KustoDatabaseDataSet + err := json.Unmarshal(body, &kdds) + return kdds, err + case string(KindSQLDWTable): + var sdtds SQLDWTableDataSet + err := json.Unmarshal(body, 
&sdtds) + return sdtds, err + case string(KindSQLDBTable): + var sdtds SQLDBTableDataSet + err := json.Unmarshal(body, &sdtds) + return sdtds, err + default: + var ds DataSet + err := json.Unmarshal(body, &ds) + return ds, err + } +} +func unmarshalBasicDataSetArray(body []byte) ([]BasicDataSet, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + dsArray := make([]BasicDataSet, len(rawMessages)) + + for index, rawMessage := range rawMessages { + ds, err := unmarshalBasicDataSet(*rawMessage) + if err != nil { + return nil, err + } + dsArray[index] = ds + } + return dsArray, nil +} + +// MarshalJSON is the custom marshaler for DataSet. +func (ds DataSet) MarshalJSON() ([]byte, error) { + ds.Kind = KindDataSet + objectMap := make(map[string]interface{}) + if ds.Kind != "" { + objectMap["kind"] = ds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsDataSet() (*DataSet, bool) { + return &ds, true +} + +// AsBasicDataSet is the BasicDataSet implementation for DataSet. +func (ds DataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &ds, true +} + +// DataSetList list response for get DataSets +type DataSetList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. 
+ Value *[]BasicDataSet `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for DataSetList struct. +func (dsl *DataSetList) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "nextLink": + if v != nil { + var nextLink string + err = json.Unmarshal(*v, &nextLink) + if err != nil { + return err + } + dsl.NextLink = &nextLink + } + case "value": + if v != nil { + value, err := unmarshalBasicDataSetArray(*v) + if err != nil { + return err + } + dsl.Value = &value + } + } + } + + return nil +} + +// DataSetListIterator provides access to a complete listing of DataSet values. +type DataSetListIterator struct { + i int + page DataSetListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *DataSetListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *DataSetListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter DataSetListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter DataSetListIterator) Response() DataSetList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter DataSetListIterator) Value() BasicDataSet { + if !iter.page.NotDone() { + return DataSet{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the DataSetListIterator type. +func NewDataSetListIterator(page DataSetListPage) DataSetListIterator { + return DataSetListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (dsl DataSetList) IsEmpty() bool { + return dsl.Value == nil || len(*dsl.Value) == 0 +} + +// dataSetListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (dsl DataSetList) dataSetListPreparer(ctx context.Context) (*http.Request, error) { + if dsl.NextLink == nil || len(to.String(dsl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(dsl.NextLink))) +} + +// DataSetListPage contains a page of BasicDataSet values. +type DataSetListPage struct { + fn func(context.Context, DataSetList) (DataSetList, error) + dsl DataSetList +} + +// NextWithContext advances to the next page of values. 
If there was an error making +// the request the page does not advance and the error is returned. +func (page *DataSetListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.dsl) + if err != nil { + return err + } + page.dsl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *DataSetListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page DataSetListPage) NotDone() bool { + return !page.dsl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page DataSetListPage) Response() DataSetList { + return page.dsl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page DataSetListPage) Values() []BasicDataSet { + if page.dsl.IsEmpty() { + return nil + } + return *page.dsl.Value +} + +// Creates a new instance of the DataSetListPage type. +func NewDataSetListPage(getNextPage func(context.Context, DataSetList) (DataSetList, error)) DataSetListPage { + return DataSetListPage{fn: getNextPage} +} + +// BasicDataSetMapping a data set mapping data transfer object. +type BasicDataSetMapping interface { + AsBlobDataSetMapping() (*BlobDataSetMapping, bool) + AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) + AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) + AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) + AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) + AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) + AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) + AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) + AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) + AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) + AsDataSetMapping() (*DataSetMapping, bool) +} + +// DataSetMapping a data set mapping data transfer object. 
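[Editor's note] DataSetListIterator yields BasicDataSet values, so callers recover the concrete shape through the generated As* helpers for each kind. A rough sketch of iterating and picking out blob-folder data sets; it assumes the iterator comes from a pre-loaded list operation on the data sets client (not shown in this hunk), and blobFolderContainers is a hypothetical helper name:

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
)

// blobFolderContainers collects the container names of every blob-folder
// data set returned by the iterator; data sets of other kinds are skipped.
func blobFolderContainers(ctx context.Context, iter datashare.DataSetListIterator) ([]string, error) {
	var containers []string
	for iter.NotDone() {
		ds := iter.Value() // BasicDataSet: the concrete type depends on the "kind" discriminator
		if bf, ok := ds.AsBlobFolderDataSet(); ok && bf.BlobFolderProperties != nil && bf.BlobFolderProperties.ContainerName != nil {
			containers = append(containers, *bf.BlobFolderProperties.ContainerName)
		}
		// Advance; on error the iterator stays on the current value.
		if err := iter.NextWithContext(ctx); err != nil {
			return nil, err
		}
	}
	return containers, nil
}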
+type DataSetMapping struct { + autorest.Response `json:"-"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +func unmarshalBasicDataSetMapping(body []byte) (BasicDataSetMapping, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["kind"] { + case string(KindBasicDataSetMappingKindBlob): + var bdsm BlobDataSetMapping + err := json.Unmarshal(body, &bdsm) + return bdsm, err + case string(KindBasicDataSetMappingKindBlobFolder): + var bfdsm BlobFolderDataSetMapping + err := json.Unmarshal(body, &bfdsm) + return bfdsm, err + case string(KindBasicDataSetMappingKindContainer): + var bcdsm BlobContainerDataSetMapping + err := json.Unmarshal(body, &bcdsm) + return bcdsm, err + case string(KindBasicDataSetMappingKindAdlsGen2File): + var ag2fdsm ADLSGen2FileDataSetMapping + err := json.Unmarshal(body, &ag2fdsm) + return ag2fdsm, err + case string(KindBasicDataSetMappingKindAdlsGen2Folder): + var ag2fdsm ADLSGen2FolderDataSetMapping + err := json.Unmarshal(body, &ag2fdsm) + return ag2fdsm, err + case string(KindBasicDataSetMappingKindAdlsGen2FileSystem): + var ag2fsdsm ADLSGen2FileSystemDataSetMapping + err := json.Unmarshal(body, &ag2fsdsm) + return ag2fsdsm, err + case string(KindBasicDataSetMappingKindKustoCluster): + var kcdsm KustoClusterDataSetMapping + err := json.Unmarshal(body, &kcdsm) + return kcdsm, err + case string(KindBasicDataSetMappingKindKustoDatabase): + var kddsm KustoDatabaseDataSetMapping + err := json.Unmarshal(body, &kddsm) + return kddsm, err + case string(KindBasicDataSetMappingKindSQLDWTable): + var sdtdsm SQLDWTableDataSetMapping + err := json.Unmarshal(body, &sdtdsm) + return sdtdsm, err + case string(KindBasicDataSetMappingKindSQLDBTable): + var sdtdsm SQLDBTableDataSetMapping + err := json.Unmarshal(body, &sdtdsm) + return sdtdsm, err + default: + var dsm DataSetMapping + err := json.Unmarshal(body, &dsm) + return dsm, err + } +} +func unmarshalBasicDataSetMappingArray(body []byte) ([]BasicDataSetMapping, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + dsmArray := make([]BasicDataSetMapping, len(rawMessages)) + + for index, rawMessage := range rawMessages { + dsm, err := unmarshalBasicDataSetMapping(*rawMessage) + if err != nil { + return nil, err + } + dsmArray[index] = dsm + } + return dsmArray, nil +} + +// MarshalJSON is the custom marshaler for DataSetMapping. 
+func (dsm DataSetMapping) MarshalJSON() ([]byte, error) { + dsm.Kind = KindBasicDataSetMappingKindDataSetMapping + objectMap := make(map[string]interface{}) + if dsm.Kind != "" { + objectMap["kind"] = dsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return &dsm, true +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for DataSetMapping. +func (dsm DataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &dsm, true +} + +// DataSetMappingList list response for get DataSetMappings +type DataSetMappingList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]BasicDataSetMapping `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for DataSetMappingList struct. 
+func (dsml *DataSetMappingList) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "nextLink": + if v != nil { + var nextLink string + err = json.Unmarshal(*v, &nextLink) + if err != nil { + return err + } + dsml.NextLink = &nextLink + } + case "value": + if v != nil { + value, err := unmarshalBasicDataSetMappingArray(*v) + if err != nil { + return err + } + dsml.Value = &value + } + } + } + + return nil +} + +// DataSetMappingListIterator provides access to a complete listing of DataSetMapping values. +type DataSetMappingListIterator struct { + i int + page DataSetMappingListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *DataSetMappingListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *DataSetMappingListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter DataSetMappingListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter DataSetMappingListIterator) Response() DataSetMappingList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter DataSetMappingListIterator) Value() BasicDataSetMapping { + if !iter.page.NotDone() { + return DataSetMapping{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the DataSetMappingListIterator type. +func NewDataSetMappingListIterator(page DataSetMappingListPage) DataSetMappingListIterator { + return DataSetMappingListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (dsml DataSetMappingList) IsEmpty() bool { + return dsml.Value == nil || len(*dsml.Value) == 0 +} + +// dataSetMappingListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (dsml DataSetMappingList) dataSetMappingListPreparer(ctx context.Context) (*http.Request, error) { + if dsml.NextLink == nil || len(to.String(dsml.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(dsml.NextLink))) +} + +// DataSetMappingListPage contains a page of BasicDataSetMapping values. 
+type DataSetMappingListPage struct { + fn func(context.Context, DataSetMappingList) (DataSetMappingList, error) + dsml DataSetMappingList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *DataSetMappingListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/DataSetMappingListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.dsml) + if err != nil { + return err + } + page.dsml = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *DataSetMappingListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page DataSetMappingListPage) NotDone() bool { + return !page.dsml.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page DataSetMappingListPage) Response() DataSetMappingList { + return page.dsml +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page DataSetMappingListPage) Values() []BasicDataSetMapping { + if page.dsml.IsEmpty() { + return nil + } + return *page.dsml.Value +} + +// Creates a new instance of the DataSetMappingListPage type. +func NewDataSetMappingListPage(getNextPage func(context.Context, DataSetMappingList) (DataSetMappingList, error)) DataSetMappingListPage { + return DataSetMappingListPage{fn: getNextPage} +} + +// DataSetMappingModel ... +type DataSetMappingModel struct { + autorest.Response `json:"-"` + Value BasicDataSetMapping `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for DataSetMappingModel struct. +func (dsmm *DataSetMappingModel) UnmarshalJSON(body []byte) error { + dsm, err := unmarshalBasicDataSetMapping(body) + if err != nil { + return err + } + dsmm.Value = dsm + + return nil +} + +// DataSetModel ... +type DataSetModel struct { + autorest.Response `json:"-"` + Value BasicDataSet `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for DataSetModel struct. +func (dsm *DataSetModel) UnmarshalJSON(body []byte) error { + ds, err := unmarshalBasicDataSet(body) + if err != nil { + return err + } + dsm.Value = ds + + return nil +} + +// DataSetsDeleteFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type DataSetsDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
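[Editor's note] DataSetsDeleteFuture wraps a long-running delete: the caller polls the embedded azure.Future and then asks Result for the terminal response. A hedged sketch of the usual wait-then-result flow; DataSetsClient is not defined in this hunk, and the sketch assumes it exposes the embedded autorest.Client as client.Client, as the generated clients in this SDK normally do. waitForDataSetDelete is a hypothetical helper name:

package example

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare"
)

// waitForDataSetDelete blocks until the delete operation behind future
// reaches a terminal state, then surfaces any failure reported by Result.
func waitForDataSetDelete(ctx context.Context, client datashare.DataSetsClient, future datashare.DataSetsDeleteFuture) error {
	// Poll until done; WaitForCompletionRef comes from the embedded azure.Future.
	if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
		return err
	}
	// Result re-checks completion and returns the final autorest.Response.
	_, err := future.Result(client)
	return err
}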
+func (future *DataSetsDeleteFuture) Result(client DataSetsClient) (ar autorest.Response, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.DataSetsDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.DataSetsDeleteFuture") + return + } + ar.Response = future.Response() + return +} + +// DefaultDto base data transfer object implementation for default resources. +type DefaultDto struct { + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Location - Location of the azure resource. + Location *string `json:"location,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Tags - Tags on the azure resource. + Tags map[string]*string `json:"tags"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for DefaultDto. +func (dd DefaultDto) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if dd.Location != nil { + objectMap["location"] = dd.Location + } + if dd.Tags != nil { + objectMap["tags"] = dd.Tags + } + return json.Marshal(objectMap) +} + +// DimensionProperties properties for dimension +type DimensionProperties struct { + // DisplayName - localized display name of the dimension to customer + DisplayName *string `json:"displayName,omitempty"` + // Name - dimension name + Name *string `json:"name,omitempty"` +} + +// Error the data share error model. +type Error struct { + // Error - The data share error body + Error *ErrorInfo `json:"error,omitempty"` +} + +// ErrorInfo the data share error body model. +type ErrorInfo struct { + // Code - Code of the error + Code *string `json:"code,omitempty"` + // Details - Nested details of the error model + Details *[]ErrorInfo `json:"details,omitempty"` + // Message - Message of the error + Message *string `json:"message,omitempty"` + // Target - Target of the error + Target *string `json:"target,omitempty"` +} + +// Identity identity of resource +type Identity struct { + // PrincipalID - READ-ONLY; service principal Id + PrincipalID *string `json:"principalId,omitempty"` + // TenantID - READ-ONLY; Tenant Id + TenantID *string `json:"tenantId,omitempty"` + // Type - Identity Type. Possible values include: 'SystemAssigned' + Type Type `json:"type,omitempty"` +} + +// Invitation a Invitation data transfer object. +type Invitation struct { + autorest.Response `json:"-"` + // InvitationProperties - Properties on the Invitation + *InvitationProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for Invitation. +func (i Invitation) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if i.InvitationProperties != nil { + objectMap["properties"] = i.InvitationProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for Invitation struct. 
+func (i *Invitation) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var invitationProperties InvitationProperties + err = json.Unmarshal(*v, &invitationProperties) + if err != nil { + return err + } + i.InvitationProperties = &invitationProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + i.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + i.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + i.Type = &typeVar + } + } + } + + return nil +} + +// InvitationList list response for get InvitationList +type InvitationList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]Invitation `json:"value,omitempty"` +} + +// InvitationListIterator provides access to a complete listing of Invitation values. +type InvitationListIterator struct { + i int + page InvitationListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *InvitationListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *InvitationListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter InvitationListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter InvitationListIterator) Response() InvitationList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter InvitationListIterator) Value() Invitation { + if !iter.page.NotDone() { + return Invitation{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the InvitationListIterator type. +func NewInvitationListIterator(page InvitationListPage) InvitationListIterator { + return InvitationListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (il InvitationList) IsEmpty() bool { + return il.Value == nil || len(*il.Value) == 0 +} + +// invitationListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. 
+func (il InvitationList) invitationListPreparer(ctx context.Context) (*http.Request, error) { + if il.NextLink == nil || len(to.String(il.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(il.NextLink))) +} + +// InvitationListPage contains a page of Invitation values. +type InvitationListPage struct { + fn func(context.Context, InvitationList) (InvitationList, error) + il InvitationList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *InvitationListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/InvitationListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.il) + if err != nil { + return err + } + page.il = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *InvitationListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page InvitationListPage) NotDone() bool { + return !page.il.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page InvitationListPage) Response() InvitationList { + return page.il +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page InvitationListPage) Values() []Invitation { + if page.il.IsEmpty() { + return nil + } + return *page.il.Value +} + +// Creates a new instance of the InvitationListPage type. +func NewInvitationListPage(getNextPage func(context.Context, InvitationList) (InvitationList, error)) InvitationListPage { + return InvitationListPage{fn: getNextPage} +} + +// InvitationProperties invitation property bag. +type InvitationProperties struct { + // InvitationID - READ-ONLY; unique invitation id + InvitationID *string `json:"invitationId,omitempty"` + // InvitationStatus - READ-ONLY; The status of the invitation. Possible values include: 'Pending', 'Accepted', 'Rejected', 'Withdrawn' + InvitationStatus InvitationStatus `json:"invitationStatus,omitempty"` + // RespondedAt - READ-ONLY; The time the recipient responded to the invitation. + RespondedAt *date.Time `json:"respondedAt,omitempty"` + // SentAt - READ-ONLY; Gets the time at which the invitation was sent. + SentAt *date.Time `json:"sentAt,omitempty"` + // TargetActiveDirectoryID - The target Azure AD Id. Can't be combined with email. + TargetActiveDirectoryID *string `json:"targetActiveDirectoryId,omitempty"` + // TargetEmail - The email the invitation is directed to. + TargetEmail *string `json:"targetEmail,omitempty"` + // TargetObjectID - The target user or application Id that invitation is being sent to. + // Must be specified along TargetActiveDirectoryId. This enables sending + // invitations to specific users or applications in an AD tenant. 
+ TargetObjectID *string `json:"targetObjectId,omitempty"` + // UserEmail - READ-ONLY; Email of the user who created the resource + UserEmail *string `json:"userEmail,omitempty"` + // UserName - READ-ONLY; Name of the user who created the resource + UserName *string `json:"userName,omitempty"` +} + +// KustoClusterDataSet a kusto cluster data set. +type KustoClusterDataSet struct { + // KustoClusterDataSetProperties - Kusto cluster data set properties. + *KustoClusterDataSetProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for KustoClusterDataSet. +func (kcds KustoClusterDataSet) MarshalJSON() ([]byte, error) { + kcds.Kind = KindKustoCluster + objectMap := make(map[string]interface{}) + if kcds.KustoClusterDataSetProperties != nil { + objectMap["properties"] = kcds.KustoClusterDataSetProperties + } + if kcds.Kind != "" { + objectMap["kind"] = kcds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return &kcds, true +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for KustoClusterDataSet. 
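+// Together with the other As* methods this implements the discriminated-union pattern the
+// SDK uses for the "kind" field: a caller holding a BasicDataSet can probe for the concrete
+// type it needs. A hedged sketch (illustrative only, not part of the generated SDK; the
+// helper name is hypothetical):
+//
+//	func describeDataSet(ds BasicDataSet) {
+//		if kusto, ok := ds.AsKustoClusterDataSet(); ok && kusto.KustoClusterDataSetProperties != nil {
+//			fmt.Println("kusto cluster data set:", to.String(kusto.KustoClusterResourceID))
+//		}
+//	}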
+func (kcds KustoClusterDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for KustoClusterDataSet. +func (kcds KustoClusterDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &kcds, true +} + +// UnmarshalJSON is the custom unmarshaler for KustoClusterDataSet struct. +func (kcds *KustoClusterDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var kustoClusterDataSetProperties KustoClusterDataSetProperties + err = json.Unmarshal(*v, &kustoClusterDataSetProperties) + if err != nil { + return err + } + kcds.KustoClusterDataSetProperties = &kustoClusterDataSetProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + kcds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + kcds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + kcds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + kcds.Type = &typeVar + } + } + } + + return nil +} + +// KustoClusterDataSetMapping a Kusto cluster data set mapping +type KustoClusterDataSetMapping struct { + // KustoClusterDataSetMappingProperties - Kusto cluster data set mapping properties. + *KustoClusterDataSetMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for KustoClusterDataSetMapping. 
+func (kcdsm KustoClusterDataSetMapping) MarshalJSON() ([]byte, error) { + kcdsm.Kind = KindBasicDataSetMappingKindKustoCluster + objectMap := make(map[string]interface{}) + if kcdsm.KustoClusterDataSetMappingProperties != nil { + objectMap["properties"] = kcdsm.KustoClusterDataSetMappingProperties + } + if kcdsm.Kind != "" { + objectMap["kind"] = kcdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return &kcdsm, true +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for KustoClusterDataSetMapping. +func (kcdsm KustoClusterDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &kcdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for KustoClusterDataSetMapping struct. 
+func (kcdsm *KustoClusterDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var kustoClusterDataSetMappingProperties KustoClusterDataSetMappingProperties + err = json.Unmarshal(*v, &kustoClusterDataSetMappingProperties) + if err != nil { + return err + } + kcdsm.KustoClusterDataSetMappingProperties = &kustoClusterDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + kcdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + kcdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + kcdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + kcdsm.Type = &typeVar + } + } + } + + return nil +} + +// KustoClusterDataSetMappingProperties properties of the Kusto cluster data set mapping +type KustoClusterDataSetMappingProperties struct { + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // KustoClusterResourceID - Resource id of the sink kusto cluster. + KustoClusterResourceID *string `json:"kustoClusterResourceId,omitempty"` + // Location - READ-ONLY; Location of the sink kusto cluster. + Location *string `json:"location,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` +} + +// KustoClusterDataSetProperties properties of the kusto cluster data set. +type KustoClusterDataSetProperties struct { + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // KustoClusterResourceID - Resource id of the kusto cluster. + KustoClusterResourceID *string `json:"kustoClusterResourceId,omitempty"` + // Location - READ-ONLY; Location of the kusto cluster. + Location *string `json:"location,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the kusto cluster data set. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` +} + +// KustoDatabaseDataSet a kusto database data set. +type KustoDatabaseDataSet struct { + // KustoDatabaseDataSetProperties - Kusto database data set properties. 
+ *KustoDatabaseDataSetProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) MarshalJSON() ([]byte, error) { + kdds.Kind = KindKustoDatabase + objectMap := make(map[string]interface{}) + if kdds.KustoDatabaseDataSetProperties != nil { + objectMap["properties"] = kdds.KustoDatabaseDataSetProperties + } + if kdds.Kind != "" { + objectMap["kind"] = kdds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return &kdds, true +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. 
+func (kdds KustoDatabaseDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for KustoDatabaseDataSet. +func (kdds KustoDatabaseDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &kdds, true +} + +// UnmarshalJSON is the custom unmarshaler for KustoDatabaseDataSet struct. +func (kdds *KustoDatabaseDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var kustoDatabaseDataSetProperties KustoDatabaseDataSetProperties + err = json.Unmarshal(*v, &kustoDatabaseDataSetProperties) + if err != nil { + return err + } + kdds.KustoDatabaseDataSetProperties = &kustoDatabaseDataSetProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + kdds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + kdds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + kdds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + kdds.Type = &typeVar + } + } + } + + return nil +} + +// KustoDatabaseDataSetMapping a Kusto database data set mapping +type KustoDatabaseDataSetMapping struct { + // KustoDatabaseDataSetMappingProperties - Kusto database data set mapping properties. + *KustoDatabaseDataSetMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) MarshalJSON() ([]byte, error) { + kddsm.Kind = KindBasicDataSetMappingKindKustoDatabase + objectMap := make(map[string]interface{}) + if kddsm.KustoDatabaseDataSetMappingProperties != nil { + objectMap["properties"] = kddsm.KustoDatabaseDataSetMappingProperties + } + if kddsm.Kind != "" { + objectMap["kind"] = kddsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. 
+func (kddsm KustoDatabaseDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return &kddsm, true +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for KustoDatabaseDataSetMapping. +func (kddsm KustoDatabaseDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &kddsm, true +} + +// UnmarshalJSON is the custom unmarshaler for KustoDatabaseDataSetMapping struct. 
+func (kddsm *KustoDatabaseDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var kustoDatabaseDataSetMappingProperties KustoDatabaseDataSetMappingProperties + err = json.Unmarshal(*v, &kustoDatabaseDataSetMappingProperties) + if err != nil { + return err + } + kddsm.KustoDatabaseDataSetMappingProperties = &kustoDatabaseDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + kddsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + kddsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + kddsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + kddsm.Type = &typeVar + } + } + } + + return nil +} + +// KustoDatabaseDataSetMappingProperties properties of the Kusto database data set mapping +type KustoDatabaseDataSetMappingProperties struct { + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // KustoClusterResourceID - Resource id of the sink kusto cluster. + KustoClusterResourceID *string `json:"kustoClusterResourceId,omitempty"` + // Location - READ-ONLY; Location of the sink kusto cluster. + Location *string `json:"location,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` +} + +// KustoDatabaseDataSetProperties properties of the kusto database data set. +type KustoDatabaseDataSetProperties struct { + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // KustoDatabaseResourceID - Resource id of the kusto database. + KustoDatabaseResourceID *string `json:"kustoDatabaseResourceId,omitempty"` + // Location - READ-ONLY; Location of the kusto cluster. + Location *string `json:"location,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the kusto database data set. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` +} + +// OperationList list response for get operations. +type OperationList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]OperationModel `json:"value,omitempty"` +} + +// OperationListIterator provides access to a complete listing of OperationModel values. +type OperationListIterator struct { + i int + page OperationListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
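+// A typical consumption loop for these generated list iterators (a hedged sketch,
+// illustrative only and not part of the generated SDK):
+//
+//	for iter.NotDone() {
+//		op := iter.Value()
+//		// ... use op ...
+//		if err := iter.NextWithContext(ctx); err != nil {
+//			return err
+//		}
+//	}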
+func (iter *OperationListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *OperationListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter OperationListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter OperationListIterator) Response() OperationList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter OperationListIterator) Value() OperationModel { + if !iter.page.NotDone() { + return OperationModel{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the OperationListIterator type. +func NewOperationListIterator(page OperationListPage) OperationListIterator { + return OperationListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (ol OperationList) IsEmpty() bool { + return ol.Value == nil || len(*ol.Value) == 0 +} + +// operationListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (ol OperationList) operationListPreparer(ctx context.Context) (*http.Request, error) { + if ol.NextLink == nil || len(to.String(ol.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(ol.NextLink))) +} + +// OperationListPage contains a page of OperationModel values. +type OperationListPage struct { + fn func(context.Context, OperationList) (OperationList, error) + ol OperationList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *OperationListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/OperationListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.ol) + if err != nil { + return err + } + page.ol = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *OperationListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. 
+func (page OperationListPage) NotDone() bool { + return !page.ol.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page OperationListPage) Response() OperationList { + return page.ol +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page OperationListPage) Values() []OperationModel { + if page.ol.IsEmpty() { + return nil + } + return *page.ol.Value +} + +// Creates a new instance of the OperationListPage type. +func NewOperationListPage(getNextPage func(context.Context, OperationList) (OperationList, error)) OperationListPage { + return OperationListPage{fn: getNextPage} +} + +// OperationMetaLogSpecification log specifications for operation api +type OperationMetaLogSpecification struct { + // BlobDuration - blob duration of the log + BlobDuration *string `json:"blobDuration,omitempty"` + // DisplayName - localized name of the log category + DisplayName *string `json:"displayName,omitempty"` + // Name - name of the log category + Name *string `json:"name,omitempty"` +} + +// OperationMetaMetricSpecification metric specifications for the operation +type OperationMetaMetricSpecification struct { + // AggregationType - aggregation type of metric + AggregationType *string `json:"aggregationType,omitempty"` + // Dimensions - properties for dimension + Dimensions *[]DimensionProperties `json:"dimensions,omitempty"` + // DisplayDescription - description of the metric + DisplayDescription *string `json:"displayDescription,omitempty"` + // DisplayName - localized name of the metric + DisplayName *string `json:"displayName,omitempty"` + // EnableRegionalMdmAccount - enable regional mdm account + EnableRegionalMdmAccount *string `json:"enableRegionalMdmAccount,omitempty"` + // InternalMetricName - internal metric name + InternalMetricName *string `json:"internalMetricName,omitempty"` + // Name - name of the metric + Name *string `json:"name,omitempty"` + // ResourceIDDimensionNameOverride - dimension name use to replace resource id if specified + ResourceIDDimensionNameOverride *string `json:"resourceIdDimensionNameOverride,omitempty"` + // SupportedAggregationTypes - supported aggregation types + SupportedAggregationTypes *[]string `json:"supportedAggregationTypes,omitempty"` + // SupportedTimeGrainTypes - supported time grain types + SupportedTimeGrainTypes *[]string `json:"supportedTimeGrainTypes,omitempty"` + // Unit - units for the metric + Unit *string `json:"unit,omitempty"` +} + +// OperationMetaPropertyInfo properties on meta info +type OperationMetaPropertyInfo struct { + // ServiceSpecification - meta service specification + ServiceSpecification *OperationMetaServiceSpecification `json:"serviceSpecification,omitempty"` +} + +// OperationMetaServiceSpecification the operation meta service specification +type OperationMetaServiceSpecification struct { + // LogSpecifications - log specifications for the operation + LogSpecifications *[]OperationMetaLogSpecification `json:"logSpecifications,omitempty"` + // MetricSpecifications - metric specifications for the operation + MetricSpecifications *[]OperationMetaMetricSpecification `json:"metricSpecifications,omitempty"` +} + +// OperationModel the response model for get operations +type OperationModel struct { + // Display - Properties on the operation + Display *OperationModelProperties `json:"display,omitempty"` + // Name - Operation name for display purposes + Name *string `json:"name,omitempty"` + // Origin - origin of the operation + Origin 
*string `json:"origin,omitempty"` + // OperationMetaPropertyInfo - properties for the operation meta info + *OperationMetaPropertyInfo `json:"properties,omitempty"` +} + +// MarshalJSON is the custom marshaler for OperationModel. +func (om OperationModel) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if om.Display != nil { + objectMap["display"] = om.Display + } + if om.Name != nil { + objectMap["name"] = om.Name + } + if om.Origin != nil { + objectMap["origin"] = om.Origin + } + if om.OperationMetaPropertyInfo != nil { + objectMap["properties"] = om.OperationMetaPropertyInfo + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for OperationModel struct. +func (om *OperationModel) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "display": + if v != nil { + var display OperationModelProperties + err = json.Unmarshal(*v, &display) + if err != nil { + return err + } + om.Display = &display + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + om.Name = &name + } + case "origin": + if v != nil { + var origin string + err = json.Unmarshal(*v, &origin) + if err != nil { + return err + } + om.Origin = &origin + } + case "properties": + if v != nil { + var operationMetaPropertyInfo OperationMetaPropertyInfo + err = json.Unmarshal(*v, &operationMetaPropertyInfo) + if err != nil { + return err + } + om.OperationMetaPropertyInfo = &operationMetaPropertyInfo + } + } + } + + return nil +} + +// OperationModelProperties properties on operations +type OperationModelProperties struct { + // Description - Description of the operation for display purposes + Description *string `json:"description,omitempty"` + // Operation - Name of the operation for display purposes + Operation *string `json:"operation,omitempty"` + // Provider - Name of the provider for display purposes + Provider *string `json:"provider,omitempty"` + // Resource - Name of the resource type for display purposes + Resource *string `json:"resource,omitempty"` +} + +// OperationResponse response for long running operation +type OperationResponse struct { + autorest.Response `json:"-"` + // EndTime - start time + EndTime *date.Time `json:"endTime,omitempty"` + // Error - The error property when status is failed. + Error *ErrorInfo `json:"error,omitempty"` + // StartTime - start time + StartTime *date.Time `json:"startTime,omitempty"` + // Status - Operation state of the long running operation. Possible values include: 'StatusAccepted', 'StatusInProgress', 'StatusTransientFailure', 'StatusSucceeded', 'StatusFailed', 'StatusCanceled' + Status Status `json:"status,omitempty"` +} + +// ProviderShareSubscription a provider side share subscription data transfer object. +type ProviderShareSubscription struct { + autorest.Response `json:"-"` + // ProviderShareSubscriptionProperties - properties of providerShareSubscription + *ProviderShareSubscriptionProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ProviderShareSubscription. 
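+// As with the other generated marshalers, only writable fields are emitted here;
+// READ-ONLY properties such as ID, Name and Type are omitted so they are never sent
+// back to the service in request bodies.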
+func (pss ProviderShareSubscription) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if pss.ProviderShareSubscriptionProperties != nil { + objectMap["properties"] = pss.ProviderShareSubscriptionProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for ProviderShareSubscription struct. +func (pss *ProviderShareSubscription) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var providerShareSubscriptionProperties ProviderShareSubscriptionProperties + err = json.Unmarshal(*v, &providerShareSubscriptionProperties) + if err != nil { + return err + } + pss.ProviderShareSubscriptionProperties = &providerShareSubscriptionProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + pss.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + pss.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + pss.Type = &typeVar + } + } + } + + return nil +} + +// ProviderShareSubscriptionList list response for get ShareSubscription. +type ProviderShareSubscriptionList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]ProviderShareSubscription `json:"value,omitempty"` +} + +// ProviderShareSubscriptionListIterator provides access to a complete listing of ProviderShareSubscription +// values. +type ProviderShareSubscriptionListIterator struct { + i int + page ProviderShareSubscriptionListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *ProviderShareSubscriptionListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ProviderShareSubscriptionListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ProviderShareSubscriptionListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ProviderShareSubscriptionListIterator) Response() ProviderShareSubscriptionList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. 
+func (iter ProviderShareSubscriptionListIterator) Value() ProviderShareSubscription { + if !iter.page.NotDone() { + return ProviderShareSubscription{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ProviderShareSubscriptionListIterator type. +func NewProviderShareSubscriptionListIterator(page ProviderShareSubscriptionListPage) ProviderShareSubscriptionListIterator { + return ProviderShareSubscriptionListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (pssl ProviderShareSubscriptionList) IsEmpty() bool { + return pssl.Value == nil || len(*pssl.Value) == 0 +} + +// providerShareSubscriptionListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (pssl ProviderShareSubscriptionList) providerShareSubscriptionListPreparer(ctx context.Context) (*http.Request, error) { + if pssl.NextLink == nil || len(to.String(pssl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(pssl.NextLink))) +} + +// ProviderShareSubscriptionListPage contains a page of ProviderShareSubscription values. +type ProviderShareSubscriptionListPage struct { + fn func(context.Context, ProviderShareSubscriptionList) (ProviderShareSubscriptionList, error) + pssl ProviderShareSubscriptionList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ProviderShareSubscriptionListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.pssl) + if err != nil { + return err + } + page.pssl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *ProviderShareSubscriptionListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ProviderShareSubscriptionListPage) NotDone() bool { + return !page.pssl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ProviderShareSubscriptionListPage) Response() ProviderShareSubscriptionList { + return page.pssl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ProviderShareSubscriptionListPage) Values() []ProviderShareSubscription { + if page.pssl.IsEmpty() { + return nil + } + return *page.pssl.Value +} + +// Creates a new instance of the ProviderShareSubscriptionListPage type. 
+func NewProviderShareSubscriptionListPage(getNextPage func(context.Context, ProviderShareSubscriptionList) (ProviderShareSubscriptionList, error)) ProviderShareSubscriptionListPage { + return ProviderShareSubscriptionListPage{fn: getNextPage} +} + +// ProviderShareSubscriptionProperties provider share subscription properties +type ProviderShareSubscriptionProperties struct { + // ConsumerEmail - READ-ONLY; Email of the consumer who created the share subscription + ConsumerEmail *string `json:"consumerEmail,omitempty"` + // ConsumerName - READ-ONLY; Name of the consumer who created the share subscription + ConsumerName *string `json:"consumerName,omitempty"` + // ConsumerTenantName - READ-ONLY; Tenant name of the consumer who created the share subscription + ConsumerTenantName *string `json:"consumerTenantName,omitempty"` + // CreatedAt - READ-ONLY; created at + CreatedAt *date.Time `json:"createdAt,omitempty"` + // ProviderEmail - READ-ONLY; Email of the provider who created the share + ProviderEmail *string `json:"providerEmail,omitempty"` + // ProviderName - READ-ONLY; Name of the provider who created the share + ProviderName *string `json:"providerName,omitempty"` + // SharedAt - READ-ONLY; Shared at + SharedAt *date.Time `json:"sharedAt,omitempty"` + // ShareSubscriptionObjectID - READ-ONLY; share Subscription Object Id + ShareSubscriptionObjectID *string `json:"shareSubscriptionObjectId,omitempty"` + // ShareSubscriptionStatus - READ-ONLY; Gets the status of share subscription. Possible values include: 'Active', 'Revoked', 'SourceDeleted', 'Revoking' + ShareSubscriptionStatus ShareSubscriptionStatus `json:"shareSubscriptionStatus,omitempty"` +} + +// ProviderShareSubscriptionsRevokeFuture an abstraction for monitoring and retrieving the results of a +// long-running operation. +type ProviderShareSubscriptionsRevokeFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *ProviderShareSubscriptionsRevokeFuture) Result(client ProviderShareSubscriptionsClient) (pss ProviderShareSubscription, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsRevokeFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.ProviderShareSubscriptionsRevokeFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if pss.Response.Response, err = future.GetResult(sender); err == nil && pss.Response.Response.StatusCode != http.StatusNoContent { + pss, err = client.RevokeResponder(pss.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsRevokeFuture", "Result", pss.Response.Response, "Failure responding to request") + } + } + return +} + +// ProxyDto base data transfer object implementation for proxy resources. 
+type ProxyDto struct { + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// ScheduledSourceShareSynchronizationSettingProperties a Scheduled source synchronization setting data +// transfer object. +type ScheduledSourceShareSynchronizationSettingProperties struct { + // RecurrenceInterval - Recurrence Interval. Possible values include: 'Hour', 'Day' + RecurrenceInterval RecurrenceInterval `json:"recurrenceInterval,omitempty"` + // SynchronizationTime - Synchronization time + SynchronizationTime *date.Time `json:"synchronizationTime,omitempty"` +} + +// ScheduledSourceSynchronizationSetting a type of synchronization setting based on schedule +type ScheduledSourceSynchronizationSetting struct { + // ScheduledSourceShareSynchronizationSettingProperties - Properties of scheduled synchronization + *ScheduledSourceShareSynchronizationSettingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindSourceShareSynchronizationSetting', 'KindScheduleBased' + Kind KindBasicSourceShareSynchronizationSetting `json:"kind,omitempty"` +} + +// MarshalJSON is the custom marshaler for ScheduledSourceSynchronizationSetting. +func (ssss ScheduledSourceSynchronizationSetting) MarshalJSON() ([]byte, error) { + ssss.Kind = KindScheduleBased + objectMap := make(map[string]interface{}) + if ssss.ScheduledSourceShareSynchronizationSettingProperties != nil { + objectMap["properties"] = ssss.ScheduledSourceShareSynchronizationSettingProperties + } + if ssss.Kind != "" { + objectMap["kind"] = ssss.Kind + } + return json.Marshal(objectMap) +} + +// AsScheduledSourceSynchronizationSetting is the BasicSourceShareSynchronizationSetting implementation for ScheduledSourceSynchronizationSetting. +func (ssss ScheduledSourceSynchronizationSetting) AsScheduledSourceSynchronizationSetting() (*ScheduledSourceSynchronizationSetting, bool) { + return &ssss, true +} + +// AsSourceShareSynchronizationSetting is the BasicSourceShareSynchronizationSetting implementation for ScheduledSourceSynchronizationSetting. +func (ssss ScheduledSourceSynchronizationSetting) AsSourceShareSynchronizationSetting() (*SourceShareSynchronizationSetting, bool) { + return nil, false +} + +// AsBasicSourceShareSynchronizationSetting is the BasicSourceShareSynchronizationSetting implementation for ScheduledSourceSynchronizationSetting. +func (ssss ScheduledSourceSynchronizationSetting) AsBasicSourceShareSynchronizationSetting() (BasicSourceShareSynchronizationSetting, bool) { + return &ssss, true +} + +// UnmarshalJSON is the custom unmarshaler for ScheduledSourceSynchronizationSetting struct. 
+func (ssss *ScheduledSourceSynchronizationSetting) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var scheduledSourceShareSynchronizationSettingProperties ScheduledSourceShareSynchronizationSettingProperties + err = json.Unmarshal(*v, &scheduledSourceShareSynchronizationSettingProperties) + if err != nil { + return err + } + ssss.ScheduledSourceShareSynchronizationSettingProperties = &scheduledSourceShareSynchronizationSettingProperties + } + case "kind": + if v != nil { + var kind KindBasicSourceShareSynchronizationSetting + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + ssss.Kind = kind + } + } + } + + return nil +} + +// ScheduledSynchronizationSetting a type of synchronization setting based on schedule +type ScheduledSynchronizationSetting struct { + // ScheduledSynchronizationSettingProperties - Properties of scheduled synchronization + *ScheduledSynchronizationSettingProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` + // Kind - Possible values include: 'KindBasicSynchronizationSettingKindSynchronizationSetting', 'KindBasicSynchronizationSettingKindScheduleBased' + Kind KindBasicSynchronizationSetting `json:"kind,omitempty"` +} + +// MarshalJSON is the custom marshaler for ScheduledSynchronizationSetting. +func (sss ScheduledSynchronizationSetting) MarshalJSON() ([]byte, error) { + sss.Kind = KindBasicSynchronizationSettingKindScheduleBased + objectMap := make(map[string]interface{}) + if sss.ScheduledSynchronizationSettingProperties != nil { + objectMap["properties"] = sss.ScheduledSynchronizationSettingProperties + } + if sss.Kind != "" { + objectMap["kind"] = sss.Kind + } + return json.Marshal(objectMap) +} + +// AsScheduledSynchronizationSetting is the BasicSynchronizationSetting implementation for ScheduledSynchronizationSetting. +func (sss ScheduledSynchronizationSetting) AsScheduledSynchronizationSetting() (*ScheduledSynchronizationSetting, bool) { + return &sss, true +} + +// AsSynchronizationSetting is the BasicSynchronizationSetting implementation for ScheduledSynchronizationSetting. +func (sss ScheduledSynchronizationSetting) AsSynchronizationSetting() (*SynchronizationSetting, bool) { + return nil, false +} + +// AsBasicSynchronizationSetting is the BasicSynchronizationSetting implementation for ScheduledSynchronizationSetting. +func (sss ScheduledSynchronizationSetting) AsBasicSynchronizationSetting() (BasicSynchronizationSetting, bool) { + return &sss, true +} + +// UnmarshalJSON is the custom unmarshaler for ScheduledSynchronizationSetting struct. 
+func (sss *ScheduledSynchronizationSetting) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var scheduledSynchronizationSettingProperties ScheduledSynchronizationSettingProperties + err = json.Unmarshal(*v, &scheduledSynchronizationSettingProperties) + if err != nil { + return err + } + sss.ScheduledSynchronizationSettingProperties = &scheduledSynchronizationSettingProperties + } + case "kind": + if v != nil { + var kind KindBasicSynchronizationSetting + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + sss.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sss.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sss.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sss.Type = &typeVar + } + } + } + + return nil +} + +// ScheduledSynchronizationSettingProperties a Scheduled synchronization setting data transfer object. +type ScheduledSynchronizationSettingProperties struct { + // CreatedAt - READ-ONLY; Time at which the synchronization setting was created. + CreatedAt *date.Time `json:"createdAt,omitempty"` + // ProvisioningState - READ-ONLY; Gets or sets the provisioning state. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // RecurrenceInterval - Recurrence Interval. Possible values include: 'Hour', 'Day' + RecurrenceInterval RecurrenceInterval `json:"recurrenceInterval,omitempty"` + // SynchronizationTime - Synchronization time + SynchronizationTime *date.Time `json:"synchronizationTime,omitempty"` + // UserName - READ-ONLY; Name of the user who created the synchronization setting. + UserName *string `json:"userName,omitempty"` +} + +// ScheduledTrigger a type of trigger based on schedule +type ScheduledTrigger struct { + // ScheduledTriggerProperties - Properties of scheduled synchronization + *ScheduledTriggerProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` + // Kind - Possible values include: 'KindBasicTriggerKindTrigger', 'KindBasicTriggerKindScheduleBased' + Kind KindBasicTrigger `json:"kind,omitempty"` +} + +// MarshalJSON is the custom marshaler for ScheduledTrigger. +func (st ScheduledTrigger) MarshalJSON() ([]byte, error) { + st.Kind = KindBasicTriggerKindScheduleBased + objectMap := make(map[string]interface{}) + if st.ScheduledTriggerProperties != nil { + objectMap["properties"] = st.ScheduledTriggerProperties + } + if st.Kind != "" { + objectMap["kind"] = st.Kind + } + return json.Marshal(objectMap) +} + +// AsScheduledTrigger is the BasicTrigger implementation for ScheduledTrigger. +func (st ScheduledTrigger) AsScheduledTrigger() (*ScheduledTrigger, bool) { + return &st, true +} + +// AsTrigger is the BasicTrigger implementation for ScheduledTrigger. 
+func (st ScheduledTrigger) AsTrigger() (*Trigger, bool) { + return nil, false +} + +// AsBasicTrigger is the BasicTrigger implementation for ScheduledTrigger. +func (st ScheduledTrigger) AsBasicTrigger() (BasicTrigger, bool) { + return &st, true +} + +// UnmarshalJSON is the custom unmarshaler for ScheduledTrigger struct. +func (st *ScheduledTrigger) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var scheduledTriggerProperties ScheduledTriggerProperties + err = json.Unmarshal(*v, &scheduledTriggerProperties) + if err != nil { + return err + } + st.ScheduledTriggerProperties = &scheduledTriggerProperties + } + case "kind": + if v != nil { + var kind KindBasicTrigger + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + st.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + st.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + st.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + st.Type = &typeVar + } + } + } + + return nil +} + +// ScheduledTriggerProperties a Scheduled trigger data transfer object. +type ScheduledTriggerProperties struct { + // CreatedAt - READ-ONLY; Time at which the trigger was created. + CreatedAt *date.Time `json:"createdAt,omitempty"` + // ProvisioningState - READ-ONLY; Gets the provisioning state. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // RecurrenceInterval - Recurrence Interval. Possible values include: 'Hour', 'Day' + RecurrenceInterval RecurrenceInterval `json:"recurrenceInterval,omitempty"` + // SynchronizationMode - Synchronization mode. Possible values include: 'Incremental', 'FullSync' + SynchronizationMode SynchronizationMode `json:"synchronizationMode,omitempty"` + // SynchronizationTime - Synchronization time + SynchronizationTime *date.Time `json:"synchronizationTime,omitempty"` + // TriggerStatus - READ-ONLY; Gets the trigger state. Possible values include: 'TriggerStatusActive', 'TriggerStatusInactive', 'TriggerStatusSourceSynchronizationSettingDeleted' + TriggerStatus TriggerStatus `json:"triggerStatus,omitempty"` + // UserName - READ-ONLY; Name of the user who created the trigger. + UserName *string `json:"userName,omitempty"` +} + +// Share a share data transfer object. +type Share struct { + autorest.Response `json:"-"` + // ShareProperties - Properties on the share + *ShareProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for Share. +func (s Share) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if s.ShareProperties != nil { + objectMap["properties"] = s.ShareProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for Share struct. 
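+//
+// Illustrative sketch, not part of the generated SDK: because of this custom unmarshaler the
+// flattened "properties" envelope is populated transparently when decoding an API payload,
+// e.g. (the payload shown is a made-up example):
+//
+//    var s Share
+//    if err := json.Unmarshal([]byte(`{"name":"example","properties":{"shareKind":"CopyBased"}}`), &s); err != nil {
+//        return err
+//    }
+//    // s.Name and s.ShareProperties.ShareKind are now set.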
+func (s *Share) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var shareProperties ShareProperties + err = json.Unmarshal(*v, &shareProperties) + if err != nil { + return err + } + s.ShareProperties = &shareProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + s.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + s.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + s.Type = &typeVar + } + } + } + + return nil +} + +// ShareList list response for get Shares. +type ShareList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]Share `json:"value,omitempty"` +} + +// ShareListIterator provides access to a complete listing of Share values. +type ShareListIterator struct { + i int + page ShareListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *ShareListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ShareListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ShareListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ShareListIterator) Response() ShareList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter ShareListIterator) Value() Share { + if !iter.page.NotDone() { + return Share{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ShareListIterator type. +func NewShareListIterator(page ShareListPage) ShareListIterator { + return ShareListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (sl ShareList) IsEmpty() bool { + return sl.Value == nil || len(*sl.Value) == 0 +} + +// shareListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. 
+func (sl ShareList) shareListPreparer(ctx context.Context) (*http.Request, error) { + if sl.NextLink == nil || len(to.String(sl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sl.NextLink))) +} + +// ShareListPage contains a page of Share values. +type ShareListPage struct { + fn func(context.Context, ShareList) (ShareList, error) + sl ShareList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ShareListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.sl) + if err != nil { + return err + } + page.sl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *ShareListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ShareListPage) NotDone() bool { + return !page.sl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ShareListPage) Response() ShareList { + return page.sl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ShareListPage) Values() []Share { + if page.sl.IsEmpty() { + return nil + } + return *page.sl.Value +} + +// Creates a new instance of the ShareListPage type. +func NewShareListPage(getNextPage func(context.Context, ShareList) (ShareList, error)) ShareListPage { + return ShareListPage{fn: getNextPage} +} + +// ShareProperties share property bag. +type ShareProperties struct { + // CreatedAt - READ-ONLY; Time at which the share was created. + CreatedAt *date.Time `json:"createdAt,omitempty"` + // Description - Share description. + Description *string `json:"description,omitempty"` + // ProvisioningState - READ-ONLY; Gets or sets the provisioning state. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ShareKind - Share kind. Possible values include: 'CopyBased', 'InPlace' + ShareKind ShareKind `json:"shareKind,omitempty"` + // Terms - Share terms. + Terms *string `json:"terms,omitempty"` + // UserEmail - READ-ONLY; Email of the user who created the resource + UserEmail *string `json:"userEmail,omitempty"` + // UserName - READ-ONLY; Name of the user who created the resource + UserName *string `json:"userName,omitempty"` +} + +// SharesDeleteFuture an abstraction for monitoring and retrieving the results of a long-running operation. +type SharesDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
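+//
+// Illustrative sketch, not part of the generated SDK: callers typically block on the future and
+// then fetch the outcome, assuming a configured SharesClient named client (the exact parameter
+// order of Delete is not shown here; see SharesClient in this package):
+//
+//    future, err := client.Delete(ctx, resourceGroup, accountName, shareName)
+//    if err != nil {
+//        return err
+//    }
+//    if err := future.WaitForCompletionRef(ctx, client.Client); err != nil {
+//        return err
+//    }
+//    resp, err := future.Result(client)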
+func (future *SharesDeleteFuture) Result(client SharesClient) (or OperationResponse, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.SharesDeleteFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if or.Response.Response, err = future.GetResult(sender); err == nil && or.Response.Response.StatusCode != http.StatusNoContent { + or, err = client.DeleteResponder(or.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesDeleteFuture", "Result", or.Response.Response, "Failure responding to request") + } + } + return +} + +// ShareSubscription a share subscription data transfer object. +type ShareSubscription struct { + autorest.Response `json:"-"` + // ShareSubscriptionProperties - Properties on the share subscription + *ShareSubscriptionProperties `json:"properties,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for ShareSubscription. +func (ss ShareSubscription) MarshalJSON() ([]byte, error) { + objectMap := make(map[string]interface{}) + if ss.ShareSubscriptionProperties != nil { + objectMap["properties"] = ss.ShareSubscriptionProperties + } + return json.Marshal(objectMap) +} + +// UnmarshalJSON is the custom unmarshaler for ShareSubscription struct. +func (ss *ShareSubscription) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var shareSubscriptionProperties ShareSubscriptionProperties + err = json.Unmarshal(*v, &shareSubscriptionProperties) + if err != nil { + return err + } + ss.ShareSubscriptionProperties = &shareSubscriptionProperties + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + ss.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + ss.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + ss.Type = &typeVar + } + } + } + + return nil +} + +// ShareSubscriptionList list response for get ShareSubscription. +type ShareSubscriptionList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]ShareSubscription `json:"value,omitempty"` +} + +// ShareSubscriptionListIterator provides access to a complete listing of ShareSubscription values. +type ShareSubscriptionListIterator struct { + i int + page ShareSubscriptionListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. 
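+//
+// Illustrative sketch, not part of the generated SDK: a complete listing is usually consumed
+// with the NotDone/Value/NextWithContext loop, assuming an iterator iter obtained from one of
+// the *Complete list methods:
+//
+//    for iter.NotDone() {
+//        sub := iter.Value()
+//        _ = sub // process the ShareSubscription
+//        if err := iter.NextWithContext(ctx); err != nil {
+//            return err
+//        }
+//    }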
+func (iter *ShareSubscriptionListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ShareSubscriptionListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ShareSubscriptionListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ShareSubscriptionListIterator) Response() ShareSubscriptionList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter ShareSubscriptionListIterator) Value() ShareSubscription { + if !iter.page.NotDone() { + return ShareSubscription{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ShareSubscriptionListIterator type. +func NewShareSubscriptionListIterator(page ShareSubscriptionListPage) ShareSubscriptionListIterator { + return ShareSubscriptionListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (ssl ShareSubscriptionList) IsEmpty() bool { + return ssl.Value == nil || len(*ssl.Value) == 0 +} + +// shareSubscriptionListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (ssl ShareSubscriptionList) shareSubscriptionListPreparer(ctx context.Context) (*http.Request, error) { + if ssl.NextLink == nil || len(to.String(ssl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(ssl.NextLink))) +} + +// ShareSubscriptionListPage contains a page of ShareSubscription values. +type ShareSubscriptionListPage struct { + fn func(context.Context, ShareSubscriptionList) (ShareSubscriptionList, error) + ssl ShareSubscriptionList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ShareSubscriptionListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.ssl) + if err != nil { + return err + } + page.ssl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. 
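+//
+// Illustrative sketch, not part of the generated SDK: page-level access is the alternative to
+// the iterator above, assuming a ShareSubscriptionListPage named page returned by a list method:
+//
+//    for page.NotDone() {
+//        for _, sub := range page.Values() {
+//            _ = sub // process each ShareSubscription
+//        }
+//        if err := page.NextWithContext(ctx); err != nil {
+//            return err
+//        }
+//    }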
+func (page *ShareSubscriptionListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ShareSubscriptionListPage) NotDone() bool { + return !page.ssl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ShareSubscriptionListPage) Response() ShareSubscriptionList { + return page.ssl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ShareSubscriptionListPage) Values() []ShareSubscription { + if page.ssl.IsEmpty() { + return nil + } + return *page.ssl.Value +} + +// Creates a new instance of the ShareSubscriptionListPage type. +func NewShareSubscriptionListPage(getNextPage func(context.Context, ShareSubscriptionList) (ShareSubscriptionList, error)) ShareSubscriptionListPage { + return ShareSubscriptionListPage{fn: getNextPage} +} + +// ShareSubscriptionProperties share subscription property bag. +type ShareSubscriptionProperties struct { + // CreatedAt - READ-ONLY; Time at which the share subscription was created. + CreatedAt *date.Time `json:"createdAt,omitempty"` + // InvitationID - The invitation id. + InvitationID *string `json:"invitationId,omitempty"` + // ProviderEmail - READ-ONLY; Email of the provider who created the resource + ProviderEmail *string `json:"providerEmail,omitempty"` + // ProviderName - READ-ONLY; Name of the provider who created the resource + ProviderName *string `json:"providerName,omitempty"` + // ProviderTenantName - READ-ONLY; Tenant name of the provider who created the resource + ProviderTenantName *string `json:"providerTenantName,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the share subscription. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // ShareDescription - READ-ONLY; Description of share + ShareDescription *string `json:"shareDescription,omitempty"` + // ShareKind - READ-ONLY; Kind of share. Possible values include: 'CopyBased', 'InPlace' + ShareKind ShareKind `json:"shareKind,omitempty"` + // ShareName - READ-ONLY; Name of the share + ShareName *string `json:"shareName,omitempty"` + // ShareSubscriptionStatus - READ-ONLY; Gets the current status of share subscription. Possible values include: 'Active', 'Revoked', 'SourceDeleted', 'Revoking' + ShareSubscriptionStatus ShareSubscriptionStatus `json:"shareSubscriptionStatus,omitempty"` + // ShareTerms - READ-ONLY; Terms of a share + ShareTerms *string `json:"shareTerms,omitempty"` + // SourceShareLocation - Source share location. + SourceShareLocation *string `json:"sourceShareLocation,omitempty"` + // UserEmail - READ-ONLY; Email of the user who created the resource + UserEmail *string `json:"userEmail,omitempty"` + // UserName - READ-ONLY; Name of the user who created the resource + UserName *string `json:"userName,omitempty"` +} + +// ShareSubscriptionsCancelSynchronizationFuture an abstraction for monitoring and retrieving the results +// of a long-running operation. +type ShareSubscriptionsCancelSynchronizationFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *ShareSubscriptionsCancelSynchronizationFuture) Result(client ShareSubscriptionsClient) (sss ShareSubscriptionSynchronization, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsCancelSynchronizationFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.ShareSubscriptionsCancelSynchronizationFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if sss.Response.Response, err = future.GetResult(sender); err == nil && sss.Response.Response.StatusCode != http.StatusNoContent { + sss, err = client.CancelSynchronizationResponder(sss.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsCancelSynchronizationFuture", "Result", sss.Response.Response, "Failure responding to request") + } + } + return +} + +// ShareSubscriptionsDeleteFuture an abstraction for monitoring and retrieving the results of a +// long-running operation. +type ShareSubscriptionsDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *ShareSubscriptionsDeleteFuture) Result(client ShareSubscriptionsClient) (or OperationResponse, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.ShareSubscriptionsDeleteFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if or.Response.Response, err = future.GetResult(sender); err == nil && or.Response.Response.StatusCode != http.StatusNoContent { + or, err = client.DeleteResponder(or.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsDeleteFuture", "Result", or.Response.Response, "Failure responding to request") + } + } + return +} + +// ShareSubscriptionsSynchronizeMethodFuture an abstraction for monitoring and retrieving the results of a +// long-running operation. +type ShareSubscriptionsSynchronizeMethodFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *ShareSubscriptionsSynchronizeMethodFuture) Result(client ShareSubscriptionsClient) (sss ShareSubscriptionSynchronization, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsSynchronizeMethodFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.ShareSubscriptionsSynchronizeMethodFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if sss.Response.Response, err = future.GetResult(sender); err == nil && sss.Response.Response.StatusCode != http.StatusNoContent { + sss, err = client.SynchronizeMethodResponder(sss.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsSynchronizeMethodFuture", "Result", sss.Response.Response, "Failure responding to request") + } + } + return +} + +// ShareSubscriptionSynchronization a ShareSubscriptionSynchronization data transfer object. +type ShareSubscriptionSynchronization struct { + autorest.Response `json:"-"` + // DurationMs - READ-ONLY; Synchronization duration + DurationMs *int32 `json:"durationMs,omitempty"` + // EndTime - READ-ONLY; End time of synchronization + EndTime *date.Time `json:"endTime,omitempty"` + // Message - READ-ONLY; message of Synchronization + Message *string `json:"message,omitempty"` + // StartTime - READ-ONLY; start time of synchronization + StartTime *date.Time `json:"startTime,omitempty"` + // Status - READ-ONLY; Raw Status + Status *string `json:"status,omitempty"` + // SynchronizationID - Synchronization id + SynchronizationID *string `json:"synchronizationId,omitempty"` + // SynchronizationMode - READ-ONLY; Synchronization Mode. Possible values include: 'Incremental', 'FullSync' + SynchronizationMode SynchronizationMode `json:"synchronizationMode,omitempty"` +} + +// ShareSubscriptionSynchronizationList a consumer side list of share subscription synchronizations +type ShareSubscriptionSynchronizationList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]ShareSubscriptionSynchronization `json:"value,omitempty"` +} + +// ShareSubscriptionSynchronizationListIterator provides access to a complete listing of +// ShareSubscriptionSynchronization values. +type ShareSubscriptionSynchronizationListIterator struct { + i int + page ShareSubscriptionSynchronizationListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *ShareSubscriptionSynchronizationListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionSynchronizationListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. 
If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ShareSubscriptionSynchronizationListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ShareSubscriptionSynchronizationListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ShareSubscriptionSynchronizationListIterator) Response() ShareSubscriptionSynchronizationList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter ShareSubscriptionSynchronizationListIterator) Value() ShareSubscriptionSynchronization { + if !iter.page.NotDone() { + return ShareSubscriptionSynchronization{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ShareSubscriptionSynchronizationListIterator type. +func NewShareSubscriptionSynchronizationListIterator(page ShareSubscriptionSynchronizationListPage) ShareSubscriptionSynchronizationListIterator { + return ShareSubscriptionSynchronizationListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (sssl ShareSubscriptionSynchronizationList) IsEmpty() bool { + return sssl.Value == nil || len(*sssl.Value) == 0 +} + +// shareSubscriptionSynchronizationListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (sssl ShareSubscriptionSynchronizationList) shareSubscriptionSynchronizationListPreparer(ctx context.Context) (*http.Request, error) { + if sssl.NextLink == nil || len(to.String(sssl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sssl.NextLink))) +} + +// ShareSubscriptionSynchronizationListPage contains a page of ShareSubscriptionSynchronization values. +type ShareSubscriptionSynchronizationListPage struct { + fn func(context.Context, ShareSubscriptionSynchronizationList) (ShareSubscriptionSynchronizationList, error) + sssl ShareSubscriptionSynchronizationList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ShareSubscriptionSynchronizationListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionSynchronizationListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.sssl) + if err != nil { + return err + } + page.sssl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *ShareSubscriptionSynchronizationListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. 
+func (page ShareSubscriptionSynchronizationListPage) NotDone() bool { + return !page.sssl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ShareSubscriptionSynchronizationListPage) Response() ShareSubscriptionSynchronizationList { + return page.sssl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page ShareSubscriptionSynchronizationListPage) Values() []ShareSubscriptionSynchronization { + if page.sssl.IsEmpty() { + return nil + } + return *page.sssl.Value +} + +// Creates a new instance of the ShareSubscriptionSynchronizationListPage type. +func NewShareSubscriptionSynchronizationListPage(getNextPage func(context.Context, ShareSubscriptionSynchronizationList) (ShareSubscriptionSynchronizationList, error)) ShareSubscriptionSynchronizationListPage { + return ShareSubscriptionSynchronizationListPage{fn: getNextPage} +} + +// ShareSynchronization a ShareSynchronization data transfer object. +type ShareSynchronization struct { + // ConsumerEmail - Email of the user who created the synchronization + ConsumerEmail *string `json:"consumerEmail,omitempty"` + // ConsumerName - Name of the user who created the synchronization + ConsumerName *string `json:"consumerName,omitempty"` + // ConsumerTenantName - Tenant name of the consumer who created the synchronization + ConsumerTenantName *string `json:"consumerTenantName,omitempty"` + // DurationMs - synchronization duration + DurationMs *int32 `json:"durationMs,omitempty"` + // EndTime - End time of synchronization + EndTime *date.Time `json:"endTime,omitempty"` + // Message - message of synchronization + Message *string `json:"message,omitempty"` + // StartTime - start time of synchronization + StartTime *date.Time `json:"startTime,omitempty"` + // Status - Raw Status + Status *string `json:"status,omitempty"` + // SynchronizationID - Synchronization id + SynchronizationID *string `json:"synchronizationId,omitempty"` + // SynchronizationMode - READ-ONLY; Synchronization mode. Possible values include: 'Incremental', 'FullSync' + SynchronizationMode SynchronizationMode `json:"synchronizationMode,omitempty"` +} + +// ShareSynchronizationList list response for get ShareSynchronization. +type ShareSynchronizationList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]ShareSynchronization `json:"value,omitempty"` +} + +// ShareSynchronizationListIterator provides access to a complete listing of ShareSynchronization values. +type ShareSynchronizationListIterator struct { + i int + page ShareSynchronizationListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *ShareSynchronizationListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSynchronizationListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. 
If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *ShareSynchronizationListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter ShareSynchronizationListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter ShareSynchronizationListIterator) Response() ShareSynchronizationList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter ShareSynchronizationListIterator) Value() ShareSynchronization { + if !iter.page.NotDone() { + return ShareSynchronization{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the ShareSynchronizationListIterator type. +func NewShareSynchronizationListIterator(page ShareSynchronizationListPage) ShareSynchronizationListIterator { + return ShareSynchronizationListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (ssl ShareSynchronizationList) IsEmpty() bool { + return ssl.Value == nil || len(*ssl.Value) == 0 +} + +// shareSynchronizationListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (ssl ShareSynchronizationList) shareSynchronizationListPreparer(ctx context.Context) (*http.Request, error) { + if ssl.NextLink == nil || len(to.String(ssl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(ssl.NextLink))) +} + +// ShareSynchronizationListPage contains a page of ShareSynchronization values. +type ShareSynchronizationListPage struct { + fn func(context.Context, ShareSynchronizationList) (ShareSynchronizationList, error) + ssl ShareSynchronizationList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *ShareSynchronizationListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSynchronizationListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.ssl) + if err != nil { + return err + } + page.ssl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *ShareSynchronizationListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page ShareSynchronizationListPage) NotDone() bool { + return !page.ssl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page ShareSynchronizationListPage) Response() ShareSynchronizationList { + return page.ssl +} + +// Values returns the slice of values for the current page or nil if there are no values. 
+func (page ShareSynchronizationListPage) Values() []ShareSynchronization { + if page.ssl.IsEmpty() { + return nil + } + return *page.ssl.Value +} + +// Creates a new instance of the ShareSynchronizationListPage type. +func NewShareSynchronizationListPage(getNextPage func(context.Context, ShareSynchronizationList) (ShareSynchronizationList, error)) ShareSynchronizationListPage { + return ShareSynchronizationListPage{fn: getNextPage} +} + +// BasicSourceShareSynchronizationSetting a view of synchronization setting added by the provider +type BasicSourceShareSynchronizationSetting interface { + AsScheduledSourceSynchronizationSetting() (*ScheduledSourceSynchronizationSetting, bool) + AsSourceShareSynchronizationSetting() (*SourceShareSynchronizationSetting, bool) +} + +// SourceShareSynchronizationSetting a view of synchronization setting added by the provider +type SourceShareSynchronizationSetting struct { + // Kind - Possible values include: 'KindSourceShareSynchronizationSetting', 'KindScheduleBased' + Kind KindBasicSourceShareSynchronizationSetting `json:"kind,omitempty"` +} + +func unmarshalBasicSourceShareSynchronizationSetting(body []byte) (BasicSourceShareSynchronizationSetting, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["kind"] { + case string(KindScheduleBased): + var ssss ScheduledSourceSynchronizationSetting + err := json.Unmarshal(body, &ssss) + return ssss, err + default: + var ssss SourceShareSynchronizationSetting + err := json.Unmarshal(body, &ssss) + return ssss, err + } +} +func unmarshalBasicSourceShareSynchronizationSettingArray(body []byte) ([]BasicSourceShareSynchronizationSetting, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + ssssArray := make([]BasicSourceShareSynchronizationSetting, len(rawMessages)) + + for index, rawMessage := range rawMessages { + ssss, err := unmarshalBasicSourceShareSynchronizationSetting(*rawMessage) + if err != nil { + return nil, err + } + ssssArray[index] = ssss + } + return ssssArray, nil +} + +// MarshalJSON is the custom marshaler for SourceShareSynchronizationSetting. +func (ssss SourceShareSynchronizationSetting) MarshalJSON() ([]byte, error) { + ssss.Kind = KindSourceShareSynchronizationSetting + objectMap := make(map[string]interface{}) + if ssss.Kind != "" { + objectMap["kind"] = ssss.Kind + } + return json.Marshal(objectMap) +} + +// AsScheduledSourceSynchronizationSetting is the BasicSourceShareSynchronizationSetting implementation for SourceShareSynchronizationSetting. +func (ssss SourceShareSynchronizationSetting) AsScheduledSourceSynchronizationSetting() (*ScheduledSourceSynchronizationSetting, bool) { + return nil, false +} + +// AsSourceShareSynchronizationSetting is the BasicSourceShareSynchronizationSetting implementation for SourceShareSynchronizationSetting. +func (ssss SourceShareSynchronizationSetting) AsSourceShareSynchronizationSetting() (*SourceShareSynchronizationSetting, bool) { + return &ssss, true +} + +// AsBasicSourceShareSynchronizationSetting is the BasicSourceShareSynchronizationSetting implementation for SourceShareSynchronizationSetting. 
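+//
+// Illustrative sketch, not part of the generated SDK: values produced by the kind-based
+// unmarshal dispatch above arrive as the BasicSourceShareSynchronizationSetting interface, so a
+// type switch recovers the concrete setting, assuming setting was taken from a list response:
+//
+//    switch s := setting.(type) {
+//    case ScheduledSourceSynchronizationSetting:
+//        _ = s.ScheduledSourceShareSynchronizationSettingProperties // schedule-based setting
+//    case SourceShareSynchronizationSetting:
+//        _ = s.Kind // base/unknown kind
+//    }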
+func (ssss SourceShareSynchronizationSetting) AsBasicSourceShareSynchronizationSetting() (BasicSourceShareSynchronizationSetting, bool) { + return &ssss, true +} + +// SourceShareSynchronizationSettingList list response for get source share Synchronization settings +type SourceShareSynchronizationSettingList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]BasicSourceShareSynchronizationSetting `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for SourceShareSynchronizationSettingList struct. +func (ssssl *SourceShareSynchronizationSettingList) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "nextLink": + if v != nil { + var nextLink string + err = json.Unmarshal(*v, &nextLink) + if err != nil { + return err + } + ssssl.NextLink = &nextLink + } + case "value": + if v != nil { + value, err := unmarshalBasicSourceShareSynchronizationSettingArray(*v) + if err != nil { + return err + } + ssssl.Value = &value + } + } + } + + return nil +} + +// SourceShareSynchronizationSettingListIterator provides access to a complete listing of +// SourceShareSynchronizationSetting values. +type SourceShareSynchronizationSettingListIterator struct { + i int + page SourceShareSynchronizationSettingListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *SourceShareSynchronizationSettingListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SourceShareSynchronizationSettingListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *SourceShareSynchronizationSettingListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter SourceShareSynchronizationSettingListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter SourceShareSynchronizationSettingListIterator) Response() SourceShareSynchronizationSettingList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter SourceShareSynchronizationSettingListIterator) Value() BasicSourceShareSynchronizationSetting { + if !iter.page.NotDone() { + return SourceShareSynchronizationSetting{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the SourceShareSynchronizationSettingListIterator type. 
+func NewSourceShareSynchronizationSettingListIterator(page SourceShareSynchronizationSettingListPage) SourceShareSynchronizationSettingListIterator { + return SourceShareSynchronizationSettingListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (ssssl SourceShareSynchronizationSettingList) IsEmpty() bool { + return ssssl.Value == nil || len(*ssssl.Value) == 0 +} + +// sourceShareSynchronizationSettingListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (ssssl SourceShareSynchronizationSettingList) sourceShareSynchronizationSettingListPreparer(ctx context.Context) (*http.Request, error) { + if ssssl.NextLink == nil || len(to.String(ssssl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(ssssl.NextLink))) +} + +// SourceShareSynchronizationSettingListPage contains a page of BasicSourceShareSynchronizationSetting +// values. +type SourceShareSynchronizationSettingListPage struct { + fn func(context.Context, SourceShareSynchronizationSettingList) (SourceShareSynchronizationSettingList, error) + ssssl SourceShareSynchronizationSettingList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *SourceShareSynchronizationSettingListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SourceShareSynchronizationSettingListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.ssssl) + if err != nil { + return err + } + page.ssssl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *SourceShareSynchronizationSettingListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page SourceShareSynchronizationSettingListPage) NotDone() bool { + return !page.ssssl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page SourceShareSynchronizationSettingListPage) Response() SourceShareSynchronizationSettingList { + return page.ssssl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page SourceShareSynchronizationSettingListPage) Values() []BasicSourceShareSynchronizationSetting { + if page.ssssl.IsEmpty() { + return nil + } + return *page.ssssl.Value +} + +// Creates a new instance of the SourceShareSynchronizationSettingListPage type. +func NewSourceShareSynchronizationSettingListPage(getNextPage func(context.Context, SourceShareSynchronizationSettingList) (SourceShareSynchronizationSettingList, error)) SourceShareSynchronizationSettingListPage { + return SourceShareSynchronizationSettingListPage{fn: getNextPage} +} + +// SQLDBTableDataSet a SQL DB table data set. +type SQLDBTableDataSet struct { + // SQLDBTableProperties - SQL DB table data set properties. 
+ *SQLDBTableProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) MarshalJSON() ([]byte, error) { + sdtds.Kind = KindSQLDBTable + objectMap := make(map[string]interface{}) + if sdtds.SQLDBTableProperties != nil { + objectMap["properties"] = sdtds.SQLDBTableProperties + } + if sdtds.Kind != "" { + objectMap["kind"] = sdtds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return nil, false +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return &sdtds, true +} + +// AsDataSet is the BasicDataSet implementation for SQLDBTableDataSet. 
+func (sdtds SQLDBTableDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for SQLDBTableDataSet. +func (sdtds SQLDBTableDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &sdtds, true +} + +// UnmarshalJSON is the custom unmarshaler for SQLDBTableDataSet struct. +func (sdtds *SQLDBTableDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var sQLDBTableProperties SQLDBTableProperties + err = json.Unmarshal(*v, &sQLDBTableProperties) + if err != nil { + return err + } + sdtds.SQLDBTableProperties = &sQLDBTableProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + sdtds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sdtds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sdtds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sdtds.Type = &typeVar + } + } + } + + return nil +} + +// SQLDBTableDataSetMapping a SQL DB Table data set mapping. +type SQLDBTableDataSetMapping struct { + // SQLDBTableDataSetMappingProperties - Sql DB data set mapping properties. + *SQLDBTableDataSetMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) MarshalJSON() ([]byte, error) { + sdtdsm.Kind = KindBasicDataSetMappingKindSQLDBTable + objectMap := make(map[string]interface{}) + if sdtdsm.SQLDBTableDataSetMappingProperties != nil { + objectMap["properties"] = sdtdsm.SQLDBTableDataSetMappingProperties + } + if sdtdsm.Kind != "" { + objectMap["kind"] = sdtdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. 
+func (sdtdsm SQLDBTableDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return nil, false +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return &sdtdsm, true +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for SQLDBTableDataSetMapping. +func (sdtdsm SQLDBTableDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &sdtdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for SQLDBTableDataSetMapping struct. +func (sdtdsm *SQLDBTableDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var sQLDBTableDataSetMappingProperties SQLDBTableDataSetMappingProperties + err = json.Unmarshal(*v, &sQLDBTableDataSetMappingProperties) + if err != nil { + return err + } + sdtdsm.SQLDBTableDataSetMappingProperties = &sQLDBTableDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + sdtdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sdtdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sdtdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sdtdsm.Type = &typeVar + } + } + } + + return nil +} + +// SQLDBTableDataSetMappingProperties properties of the SQL DB table data set mapping. 
+type SQLDBTableDataSetMappingProperties struct { + // DatabaseName - DatabaseName name of the sink data set + DatabaseName *string `json:"databaseName,omitempty"` + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // SchemaName - Schema of the table. Default value is dbo. + SchemaName *string `json:"schemaName,omitempty"` + // SQLServerResourceID - Resource id of SQL server + SQLServerResourceID *string `json:"sqlServerResourceId,omitempty"` + // TableName - SQL DB table name. + TableName *string `json:"tableName,omitempty"` +} + +// SQLDBTableProperties properties of the SQL DB table data set. +type SQLDBTableProperties struct { + // DatabaseName - Database name of the source data set + DatabaseName *string `json:"databaseName,omitempty"` + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // SchemaName - Schema of the table. Default value is dbo. + SchemaName *string `json:"schemaName,omitempty"` + // SQLServerResourceID - Resource id of SQL server + SQLServerResourceID *string `json:"sqlServerResourceId,omitempty"` + // TableName - SQL DB table name. + TableName *string `json:"tableName,omitempty"` +} + +// SQLDWTableDataSet a SQL DW table data set. +type SQLDWTableDataSet struct { + // SQLDWTableProperties - SQL DW table data set properties. + *SQLDWTableProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindDataSet', 'KindBlob', 'KindBlobFolder', 'KindContainer', 'KindAdlsGen2File', 'KindAdlsGen2Folder', 'KindAdlsGen2FileSystem', 'KindAdlsGen1Folder', 'KindAdlsGen1File', 'KindKustoCluster', 'KindKustoDatabase', 'KindSQLDWTable', 'KindSQLDBTable' + Kind Kind `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) MarshalJSON() ([]byte, error) { + sdtds.Kind = KindSQLDWTable + objectMap := make(map[string]interface{}) + if sdtds.SQLDWTableProperties != nil { + objectMap["properties"] = sdtds.SQLDWTableProperties + } + if sdtds.Kind != "" { + objectMap["kind"] = sdtds.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsBlobDataSet() (*BlobDataSet, bool) { + return nil, false +} + +// AsBlobFolderDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsBlobFolderDataSet() (*BlobFolderDataSet, bool) { + return nil, false +} + +// AsBlobContainerDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsBlobContainerDataSet() (*BlobContainerDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileDataSet is the BasicDataSet implementation for SQLDWTableDataSet. 
+func (sdtds SQLDWTableDataSet) AsADLSGen2FileDataSet() (*ADLSGen2FileDataSet, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsADLSGen2FolderDataSet() (*ADLSGen2FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsADLSGen2FileSystemDataSet() (*ADLSGen2FileSystemDataSet, bool) { + return nil, false +} + +// AsADLSGen1FolderDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsADLSGen1FolderDataSet() (*ADLSGen1FolderDataSet, bool) { + return nil, false +} + +// AsADLSGen1FileDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsADLSGen1FileDataSet() (*ADLSGen1FileDataSet, bool) { + return nil, false +} + +// AsKustoClusterDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsKustoClusterDataSet() (*KustoClusterDataSet, bool) { + return nil, false +} + +// AsKustoDatabaseDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsKustoDatabaseDataSet() (*KustoDatabaseDataSet, bool) { + return nil, false +} + +// AsSQLDWTableDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsSQLDWTableDataSet() (*SQLDWTableDataSet, bool) { + return &sdtds, true +} + +// AsSQLDBTableDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsSQLDBTableDataSet() (*SQLDBTableDataSet, bool) { + return nil, false +} + +// AsDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsDataSet() (*DataSet, bool) { + return nil, false +} + +// AsBasicDataSet is the BasicDataSet implementation for SQLDWTableDataSet. +func (sdtds SQLDWTableDataSet) AsBasicDataSet() (BasicDataSet, bool) { + return &sdtds, true +} + +// UnmarshalJSON is the custom unmarshaler for SQLDWTableDataSet struct. +func (sdtds *SQLDWTableDataSet) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var sQLDWTableProperties SQLDWTableProperties + err = json.Unmarshal(*v, &sQLDWTableProperties) + if err != nil { + return err + } + sdtds.SQLDWTableProperties = &sQLDWTableProperties + } + case "kind": + if v != nil { + var kind Kind + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + sdtds.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sdtds.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sdtds.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sdtds.Type = &typeVar + } + } + } + + return nil +} + +// SQLDWTableDataSetMapping a SQL DW Table data set mapping. +type SQLDWTableDataSetMapping struct { + // SQLDWTableDataSetMappingProperties - Sql DW data set mapping properties. 
+ *SQLDWTableDataSetMappingProperties `json:"properties,omitempty"` + // Kind - Possible values include: 'KindBasicDataSetMappingKindDataSetMapping', 'KindBasicDataSetMappingKindBlob', 'KindBasicDataSetMappingKindBlobFolder', 'KindBasicDataSetMappingKindContainer', 'KindBasicDataSetMappingKindAdlsGen2File', 'KindBasicDataSetMappingKindAdlsGen2Folder', 'KindBasicDataSetMappingKindAdlsGen2FileSystem', 'KindBasicDataSetMappingKindKustoCluster', 'KindBasicDataSetMappingKindKustoDatabase', 'KindBasicDataSetMappingKindSQLDWTable', 'KindBasicDataSetMappingKindSQLDBTable' + Kind KindBasicDataSetMapping `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +// MarshalJSON is the custom marshaler for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) MarshalJSON() ([]byte, error) { + sdtdsm.Kind = KindBasicDataSetMappingKindSQLDWTable + objectMap := make(map[string]interface{}) + if sdtdsm.SQLDWTableDataSetMappingProperties != nil { + objectMap["properties"] = sdtdsm.SQLDWTableDataSetMappingProperties + } + if sdtdsm.Kind != "" { + objectMap["kind"] = sdtdsm.Kind + } + return json.Marshal(objectMap) +} + +// AsBlobDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsBlobDataSetMapping() (*BlobDataSetMapping, bool) { + return nil, false +} + +// AsBlobFolderDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsBlobFolderDataSetMapping() (*BlobFolderDataSetMapping, bool) { + return nil, false +} + +// AsBlobContainerDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsBlobContainerDataSetMapping() (*BlobContainerDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsADLSGen2FileDataSetMapping() (*ADLSGen2FileDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FolderDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsADLSGen2FolderDataSetMapping() (*ADLSGen2FolderDataSetMapping, bool) { + return nil, false +} + +// AsADLSGen2FileSystemDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsADLSGen2FileSystemDataSetMapping() (*ADLSGen2FileSystemDataSetMapping, bool) { + return nil, false +} + +// AsKustoClusterDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsKustoClusterDataSetMapping() (*KustoClusterDataSetMapping, bool) { + return nil, false +} + +// AsKustoDatabaseDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsKustoDatabaseDataSetMapping() (*KustoDatabaseDataSetMapping, bool) { + return nil, false +} + +// AsSQLDWTableDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. 
+func (sdtdsm SQLDWTableDataSetMapping) AsSQLDWTableDataSetMapping() (*SQLDWTableDataSetMapping, bool) { + return &sdtdsm, true +} + +// AsSQLDBTableDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsSQLDBTableDataSetMapping() (*SQLDBTableDataSetMapping, bool) { + return nil, false +} + +// AsDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsDataSetMapping() (*DataSetMapping, bool) { + return nil, false +} + +// AsBasicDataSetMapping is the BasicDataSetMapping implementation for SQLDWTableDataSetMapping. +func (sdtdsm SQLDWTableDataSetMapping) AsBasicDataSetMapping() (BasicDataSetMapping, bool) { + return &sdtdsm, true +} + +// UnmarshalJSON is the custom unmarshaler for SQLDWTableDataSetMapping struct. +func (sdtdsm *SQLDWTableDataSetMapping) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "properties": + if v != nil { + var sQLDWTableDataSetMappingProperties SQLDWTableDataSetMappingProperties + err = json.Unmarshal(*v, &sQLDWTableDataSetMappingProperties) + if err != nil { + return err + } + sdtdsm.SQLDWTableDataSetMappingProperties = &sQLDWTableDataSetMappingProperties + } + case "kind": + if v != nil { + var kind KindBasicDataSetMapping + err = json.Unmarshal(*v, &kind) + if err != nil { + return err + } + sdtdsm.Kind = kind + } + case "id": + if v != nil { + var ID string + err = json.Unmarshal(*v, &ID) + if err != nil { + return err + } + sdtdsm.ID = &ID + } + case "name": + if v != nil { + var name string + err = json.Unmarshal(*v, &name) + if err != nil { + return err + } + sdtdsm.Name = &name + } + case "type": + if v != nil { + var typeVar string + err = json.Unmarshal(*v, &typeVar) + if err != nil { + return err + } + sdtdsm.Type = &typeVar + } + } + } + + return nil +} + +// SQLDWTableDataSetMappingProperties properties of the SQL DW table data set mapping. +type SQLDWTableDataSetMappingProperties struct { + // DataSetID - The id of the source data set. + DataSetID *string `json:"dataSetId,omitempty"` + // DataSetMappingStatus - READ-ONLY; Gets the status of the data set mapping. Possible values include: 'Ok', 'Broken' + DataSetMappingStatus DataSetMappingStatus `json:"dataSetMappingStatus,omitempty"` + // DataWarehouseName - DataWarehouse name of the source data set + DataWarehouseName *string `json:"dataWarehouseName,omitempty"` + // ProvisioningState - READ-ONLY; Provisioning state of the data set mapping. Possible values include: 'Succeeded', 'Creating', 'Deleting', 'Moving', 'Failed' + ProvisioningState ProvisioningState `json:"provisioningState,omitempty"` + // SchemaName - Schema of the table. Default value is dbo. + SchemaName *string `json:"schemaName,omitempty"` + // SQLServerResourceID - Resource id of SQL server + SQLServerResourceID *string `json:"sqlServerResourceId,omitempty"` + // TableName - SQL DW table name. + TableName *string `json:"tableName,omitempty"` +} + +// SQLDWTableProperties properties of the SQL DW table data set. +type SQLDWTableProperties struct { + // DataSetID - READ-ONLY; Unique id for identifying a data set resource + DataSetID *string `json:"dataSetId,omitempty"` + // DataWarehouseName - DataWarehouse name of the source data set + DataWarehouseName *string `json:"dataWarehouseName,omitempty"` + // SchemaName - Schema of the table. Default value is dbo. 
+	SchemaName *string `json:"schemaName,omitempty"`
+	// SQLServerResourceID - Resource id of SQL server
+	SQLServerResourceID *string `json:"sqlServerResourceId,omitempty"`
+	// TableName - SQL DW table name.
+	TableName *string `json:"tableName,omitempty"`
+}
+
+// SynchronizationDetails synchronization details at data set level
+type SynchronizationDetails struct {
+	// DataSetID - READ-ONLY; Id of data set
+	DataSetID *string `json:"dataSetId,omitempty"`
+	// DataSetType - READ-ONLY; Type of the data set. Possible values include: 'Blob', 'Container', 'BlobFolder', 'AdlsGen2FileSystem', 'AdlsGen2Folder', 'AdlsGen2File', 'AdlsGen1Folder', 'AdlsGen1File', 'KustoCluster', 'KustoDatabase', 'SQLDBTable', 'SQLDWTable'
+	DataSetType DataSetType `json:"dataSetType,omitempty"`
+	// DurationMs - READ-ONLY; Duration of data set level copy
+	DurationMs *int32 `json:"durationMs,omitempty"`
+	// EndTime - READ-ONLY; End time of data set level copy
+	EndTime *date.Time `json:"endTime,omitempty"`
+	// FilesRead - READ-ONLY; The number of files read from the source data set
+	FilesRead *int64 `json:"filesRead,omitempty"`
+	// FilesWritten - READ-ONLY; The number of files written into the sink data set
+	FilesWritten *int64 `json:"filesWritten,omitempty"`
+	// Message - READ-ONLY; Error message if any
+	Message *string `json:"message,omitempty"`
+	// Name - READ-ONLY; Name of the data set
+	Name *string `json:"name,omitempty"`
+	// RowsCopied - READ-ONLY; The number of rows copied into the sink data set
+	RowsCopied *int64 `json:"rowsCopied,omitempty"`
+	// RowsRead - READ-ONLY; The number of rows read from the source data set.
+	RowsRead *int64 `json:"rowsRead,omitempty"`
+	// SizeRead - READ-ONLY; The size of the data read from the source data set in bytes
+	SizeRead *int64 `json:"sizeRead,omitempty"`
+	// SizeWritten - READ-ONLY; The size of the data written into the sink data set in bytes
+	SizeWritten *int64 `json:"sizeWritten,omitempty"`
+	// StartTime - READ-ONLY; Start time of data set level copy
+	StartTime *date.Time `json:"startTime,omitempty"`
+	// Status - READ-ONLY; Raw Status
+	Status *string `json:"status,omitempty"`
+	// VCore - READ-ONLY; The vCore units consumed for the data set synchronization
+	VCore *int64 `json:"vCore,omitempty"`
+}
+
+// SynchronizationDetailsList details of synchronization
+type SynchronizationDetailsList struct {
+	autorest.Response `json:"-"`
+	// NextLink - The Url of next result page.
+	NextLink *string `json:"nextLink,omitempty"`
+	// Value - Collection of items of type DataTransferObjects.
+	Value *[]SynchronizationDetails `json:"value,omitempty"`
+}
+
+// SynchronizationDetailsListIterator provides access to a complete listing of SynchronizationDetails
+// values.
+type SynchronizationDetailsListIterator struct {
+	i    int
+	page SynchronizationDetailsListPage
+}
+
+// NextWithContext advances to the next value. If there was an error making
+// the request the iterator does not advance and the error is returned.
+func (iter *SynchronizationDetailsListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationDetailsListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *SynchronizationDetailsListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter SynchronizationDetailsListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter SynchronizationDetailsListIterator) Response() SynchronizationDetailsList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter SynchronizationDetailsListIterator) Value() SynchronizationDetails { + if !iter.page.NotDone() { + return SynchronizationDetails{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the SynchronizationDetailsListIterator type. +func NewSynchronizationDetailsListIterator(page SynchronizationDetailsListPage) SynchronizationDetailsListIterator { + return SynchronizationDetailsListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (sdl SynchronizationDetailsList) IsEmpty() bool { + return sdl.Value == nil || len(*sdl.Value) == 0 +} + +// synchronizationDetailsListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (sdl SynchronizationDetailsList) synchronizationDetailsListPreparer(ctx context.Context) (*http.Request, error) { + if sdl.NextLink == nil || len(to.String(sdl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(sdl.NextLink))) +} + +// SynchronizationDetailsListPage contains a page of SynchronizationDetails values. +type SynchronizationDetailsListPage struct { + fn func(context.Context, SynchronizationDetailsList) (SynchronizationDetailsList, error) + sdl SynchronizationDetailsList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *SynchronizationDetailsListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationDetailsListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.sdl) + if err != nil { + return err + } + page.sdl = next + return nil +} + +// Next advances to the next page of values. 
If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *SynchronizationDetailsListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page SynchronizationDetailsListPage) NotDone() bool { + return !page.sdl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page SynchronizationDetailsListPage) Response() SynchronizationDetailsList { + return page.sdl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page SynchronizationDetailsListPage) Values() []SynchronizationDetails { + if page.sdl.IsEmpty() { + return nil + } + return *page.sdl.Value +} + +// Creates a new instance of the SynchronizationDetailsListPage type. +func NewSynchronizationDetailsListPage(getNextPage func(context.Context, SynchronizationDetailsList) (SynchronizationDetailsList, error)) SynchronizationDetailsListPage { + return SynchronizationDetailsListPage{fn: getNextPage} +} + +// BasicSynchronizationSetting a Synchronization Setting data transfer object. +type BasicSynchronizationSetting interface { + AsScheduledSynchronizationSetting() (*ScheduledSynchronizationSetting, bool) + AsSynchronizationSetting() (*SynchronizationSetting, bool) +} + +// SynchronizationSetting a Synchronization Setting data transfer object. +type SynchronizationSetting struct { + autorest.Response `json:"-"` + // Kind - Possible values include: 'KindBasicSynchronizationSettingKindSynchronizationSetting', 'KindBasicSynchronizationSettingKindScheduleBased' + Kind KindBasicSynchronizationSetting `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +func unmarshalBasicSynchronizationSetting(body []byte) (BasicSynchronizationSetting, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["kind"] { + case string(KindBasicSynchronizationSettingKindScheduleBased): + var sss ScheduledSynchronizationSetting + err := json.Unmarshal(body, &sss) + return sss, err + default: + var ss SynchronizationSetting + err := json.Unmarshal(body, &ss) + return ss, err + } +} +func unmarshalBasicSynchronizationSettingArray(body []byte) ([]BasicSynchronizationSetting, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + ssArray := make([]BasicSynchronizationSetting, len(rawMessages)) + + for index, rawMessage := range rawMessages { + ss, err := unmarshalBasicSynchronizationSetting(*rawMessage) + if err != nil { + return nil, err + } + ssArray[index] = ss + } + return ssArray, nil +} + +// MarshalJSON is the custom marshaler for SynchronizationSetting. +func (ss SynchronizationSetting) MarshalJSON() ([]byte, error) { + ss.Kind = KindBasicSynchronizationSettingKindSynchronizationSetting + objectMap := make(map[string]interface{}) + if ss.Kind != "" { + objectMap["kind"] = ss.Kind + } + return json.Marshal(objectMap) +} + +// AsScheduledSynchronizationSetting is the BasicSynchronizationSetting implementation for SynchronizationSetting. 
+func (ss SynchronizationSetting) AsScheduledSynchronizationSetting() (*ScheduledSynchronizationSetting, bool) { + return nil, false +} + +// AsSynchronizationSetting is the BasicSynchronizationSetting implementation for SynchronizationSetting. +func (ss SynchronizationSetting) AsSynchronizationSetting() (*SynchronizationSetting, bool) { + return &ss, true +} + +// AsBasicSynchronizationSetting is the BasicSynchronizationSetting implementation for SynchronizationSetting. +func (ss SynchronizationSetting) AsBasicSynchronizationSetting() (BasicSynchronizationSetting, bool) { + return &ss, true +} + +// SynchronizationSettingList list response for get Synchronization settings +type SynchronizationSettingList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]BasicSynchronizationSetting `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for SynchronizationSettingList struct. +func (ssl *SynchronizationSettingList) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "nextLink": + if v != nil { + var nextLink string + err = json.Unmarshal(*v, &nextLink) + if err != nil { + return err + } + ssl.NextLink = &nextLink + } + case "value": + if v != nil { + value, err := unmarshalBasicSynchronizationSettingArray(*v) + if err != nil { + return err + } + ssl.Value = &value + } + } + } + + return nil +} + +// SynchronizationSettingListIterator provides access to a complete listing of SynchronizationSetting +// values. +type SynchronizationSettingListIterator struct { + i int + page SynchronizationSettingListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *SynchronizationSettingListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *SynchronizationSettingListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter SynchronizationSettingListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter SynchronizationSettingListIterator) Response() SynchronizationSettingList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. 
+func (iter SynchronizationSettingListIterator) Value() BasicSynchronizationSetting { + if !iter.page.NotDone() { + return SynchronizationSetting{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the SynchronizationSettingListIterator type. +func NewSynchronizationSettingListIterator(page SynchronizationSettingListPage) SynchronizationSettingListIterator { + return SynchronizationSettingListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (ssl SynchronizationSettingList) IsEmpty() bool { + return ssl.Value == nil || len(*ssl.Value) == 0 +} + +// synchronizationSettingListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (ssl SynchronizationSettingList) synchronizationSettingListPreparer(ctx context.Context) (*http.Request, error) { + if ssl.NextLink == nil || len(to.String(ssl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(ssl.NextLink))) +} + +// SynchronizationSettingListPage contains a page of BasicSynchronizationSetting values. +type SynchronizationSettingListPage struct { + fn func(context.Context, SynchronizationSettingList) (SynchronizationSettingList, error) + ssl SynchronizationSettingList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *SynchronizationSettingListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.ssl) + if err != nil { + return err + } + page.ssl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *SynchronizationSettingListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page SynchronizationSettingListPage) NotDone() bool { + return !page.ssl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page SynchronizationSettingListPage) Response() SynchronizationSettingList { + return page.ssl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page SynchronizationSettingListPage) Values() []BasicSynchronizationSetting { + if page.ssl.IsEmpty() { + return nil + } + return *page.ssl.Value +} + +// Creates a new instance of the SynchronizationSettingListPage type. +func NewSynchronizationSettingListPage(getNextPage func(context.Context, SynchronizationSettingList) (SynchronizationSettingList, error)) SynchronizationSettingListPage { + return SynchronizationSettingListPage{fn: getNextPage} +} + +// SynchronizationSettingModel ... +type SynchronizationSettingModel struct { + autorest.Response `json:"-"` + Value BasicSynchronizationSetting `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for SynchronizationSettingModel struct. 
+func (ssm *SynchronizationSettingModel) UnmarshalJSON(body []byte) error { + ss, err := unmarshalBasicSynchronizationSetting(body) + if err != nil { + return err + } + ssm.Value = ss + + return nil +} + +// SynchronizationSettingsDeleteFuture an abstraction for monitoring and retrieving the results of a +// long-running operation. +type SynchronizationSettingsDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *SynchronizationSettingsDeleteFuture) Result(client SynchronizationSettingsClient) (or OperationResponse, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.SynchronizationSettingsDeleteFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if or.Response.Response, err = future.GetResult(sender); err == nil && or.Response.Response.StatusCode != http.StatusNoContent { + or, err = client.DeleteResponder(or.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsDeleteFuture", "Result", or.Response.Response, "Failure responding to request") + } + } + return +} + +// Synchronize payload for the synchronizing the data. +type Synchronize struct { + // SynchronizationMode - Mode of synchronization used in triggers and snapshot sync. Incremental by default. Possible values include: 'Incremental', 'FullSync' + SynchronizationMode SynchronizationMode `json:"synchronizationMode,omitempty"` +} + +// BasicTrigger a Trigger data transfer object. +type BasicTrigger interface { + AsScheduledTrigger() (*ScheduledTrigger, bool) + AsTrigger() (*Trigger, bool) +} + +// Trigger a Trigger data transfer object. +type Trigger struct { + autorest.Response `json:"-"` + // Kind - Possible values include: 'KindBasicTriggerKindTrigger', 'KindBasicTriggerKindScheduleBased' + Kind KindBasicTrigger `json:"kind,omitempty"` + // ID - READ-ONLY; The resource id of the azure resource + ID *string `json:"id,omitempty"` + // Name - READ-ONLY; Name of the azure resource + Name *string `json:"name,omitempty"` + // Type - READ-ONLY; Type of the azure resource + Type *string `json:"type,omitempty"` +} + +func unmarshalBasicTrigger(body []byte) (BasicTrigger, error) { + var m map[string]interface{} + err := json.Unmarshal(body, &m) + if err != nil { + return nil, err + } + + switch m["kind"] { + case string(KindBasicTriggerKindScheduleBased): + var st ScheduledTrigger + err := json.Unmarshal(body, &st) + return st, err + default: + var t Trigger + err := json.Unmarshal(body, &t) + return t, err + } +} +func unmarshalBasicTriggerArray(body []byte) ([]BasicTrigger, error) { + var rawMessages []*json.RawMessage + err := json.Unmarshal(body, &rawMessages) + if err != nil { + return nil, err + } + + tArray := make([]BasicTrigger, len(rawMessages)) + + for index, rawMessage := range rawMessages { + t, err := unmarshalBasicTrigger(*rawMessage) + if err != nil { + return nil, err + } + tArray[index] = t + } + return tArray, nil +} + +// MarshalJSON is the custom marshaler for Trigger. 
+func (t Trigger) MarshalJSON() ([]byte, error) { + t.Kind = KindBasicTriggerKindTrigger + objectMap := make(map[string]interface{}) + if t.Kind != "" { + objectMap["kind"] = t.Kind + } + return json.Marshal(objectMap) +} + +// AsScheduledTrigger is the BasicTrigger implementation for Trigger. +func (t Trigger) AsScheduledTrigger() (*ScheduledTrigger, bool) { + return nil, false +} + +// AsTrigger is the BasicTrigger implementation for Trigger. +func (t Trigger) AsTrigger() (*Trigger, bool) { + return &t, true +} + +// AsBasicTrigger is the BasicTrigger implementation for Trigger. +func (t Trigger) AsBasicTrigger() (BasicTrigger, bool) { + return &t, true +} + +// TriggerList list response for get triggers +type TriggerList struct { + autorest.Response `json:"-"` + // NextLink - The Url of next result page. + NextLink *string `json:"nextLink,omitempty"` + // Value - Collection of items of type DataTransferObjects. + Value *[]BasicTrigger `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for TriggerList struct. +func (tl *TriggerList) UnmarshalJSON(body []byte) error { + var m map[string]*json.RawMessage + err := json.Unmarshal(body, &m) + if err != nil { + return err + } + for k, v := range m { + switch k { + case "nextLink": + if v != nil { + var nextLink string + err = json.Unmarshal(*v, &nextLink) + if err != nil { + return err + } + tl.NextLink = &nextLink + } + case "value": + if v != nil { + value, err := unmarshalBasicTriggerArray(*v) + if err != nil { + return err + } + tl.Value = &value + } + } + } + + return nil +} + +// TriggerListIterator provides access to a complete listing of Trigger values. +type TriggerListIterator struct { + i int + page TriggerListPage +} + +// NextWithContext advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +func (iter *TriggerListIterator) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggerListIterator.NextWithContext") + defer func() { + sc := -1 + if iter.Response().Response.Response != nil { + sc = iter.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + iter.i++ + if iter.i < len(iter.page.Values()) { + return nil + } + err = iter.page.NextWithContext(ctx) + if err != nil { + iter.i-- + return err + } + iter.i = 0 + return nil +} + +// Next advances to the next value. If there was an error making +// the request the iterator does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (iter *TriggerListIterator) Next() error { + return iter.NextWithContext(context.Background()) +} + +// NotDone returns true if the enumeration should be started or is not yet complete. +func (iter TriggerListIterator) NotDone() bool { + return iter.page.NotDone() && iter.i < len(iter.page.Values()) +} + +// Response returns the raw server response from the last page request. +func (iter TriggerListIterator) Response() TriggerList { + return iter.page.Response() +} + +// Value returns the current value or a zero-initialized value if the +// iterator has advanced beyond the end of the collection. +func (iter TriggerListIterator) Value() BasicTrigger { + if !iter.page.NotDone() { + return Trigger{} + } + return iter.page.Values()[iter.i] +} + +// Creates a new instance of the TriggerListIterator type. 
+func NewTriggerListIterator(page TriggerListPage) TriggerListIterator { + return TriggerListIterator{page: page} +} + +// IsEmpty returns true if the ListResult contains no values. +func (tl TriggerList) IsEmpty() bool { + return tl.Value == nil || len(*tl.Value) == 0 +} + +// triggerListPreparer prepares a request to retrieve the next set of results. +// It returns nil if no more results exist. +func (tl TriggerList) triggerListPreparer(ctx context.Context) (*http.Request, error) { + if tl.NextLink == nil || len(to.String(tl.NextLink)) < 1 { + return nil, nil + } + return autorest.Prepare((&http.Request{}).WithContext(ctx), + autorest.AsJSON(), + autorest.AsGet(), + autorest.WithBaseURL(to.String(tl.NextLink))) +} + +// TriggerListPage contains a page of BasicTrigger values. +type TriggerListPage struct { + fn func(context.Context, TriggerList) (TriggerList, error) + tl TriggerList +} + +// NextWithContext advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +func (page *TriggerListPage) NextWithContext(ctx context.Context) (err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggerListPage.NextWithContext") + defer func() { + sc := -1 + if page.Response().Response.Response != nil { + sc = page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + next, err := page.fn(ctx, page.tl) + if err != nil { + return err + } + page.tl = next + return nil +} + +// Next advances to the next page of values. If there was an error making +// the request the page does not advance and the error is returned. +// Deprecated: Use NextWithContext() instead. +func (page *TriggerListPage) Next() error { + return page.NextWithContext(context.Background()) +} + +// NotDone returns true if the page enumeration should be started or is not yet complete. +func (page TriggerListPage) NotDone() bool { + return !page.tl.IsEmpty() +} + +// Response returns the raw server response from the last page request. +func (page TriggerListPage) Response() TriggerList { + return page.tl +} + +// Values returns the slice of values for the current page or nil if there are no values. +func (page TriggerListPage) Values() []BasicTrigger { + if page.tl.IsEmpty() { + return nil + } + return *page.tl.Value +} + +// Creates a new instance of the TriggerListPage type. +func NewTriggerListPage(getNextPage func(context.Context, TriggerList) (TriggerList, error)) TriggerListPage { + return TriggerListPage{fn: getNextPage} +} + +// TriggerModel ... +type TriggerModel struct { + autorest.Response `json:"-"` + Value BasicTrigger `json:"value,omitempty"` +} + +// UnmarshalJSON is the custom unmarshaler for TriggerModel struct. +func (tm *TriggerModel) UnmarshalJSON(body []byte) error { + t, err := unmarshalBasicTrigger(body) + if err != nil { + return err + } + tm.Value = t + + return nil +} + +// TriggersCreateFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type TriggersCreateFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. 
+func (future *TriggersCreateFuture) Result(client TriggersClient) (tm TriggerModel, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersCreateFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.TriggersCreateFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if tm.Response.Response, err = future.GetResult(sender); err == nil && tm.Response.Response.StatusCode != http.StatusNoContent { + tm, err = client.CreateResponder(tm.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersCreateFuture", "Result", tm.Response.Response, "Failure responding to request") + } + } + return +} + +// TriggersDeleteFuture an abstraction for monitoring and retrieving the results of a long-running +// operation. +type TriggersDeleteFuture struct { + azure.Future +} + +// Result returns the result of the asynchronous operation. +// If the operation has not completed it will return an error. +func (future *TriggersDeleteFuture) Result(client TriggersClient) (or OperationResponse, err error) { + var done bool + done, err = future.DoneWithContext(context.Background(), client) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersDeleteFuture", "Result", future.Response(), "Polling failure") + return + } + if !done { + err = azure.NewAsyncOpIncompleteError("datashare.TriggersDeleteFuture") + return + } + sender := autorest.DecorateSender(client, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)) + if or.Response.Response, err = future.GetResult(sender); err == nil && or.Response.Response.StatusCode != http.StatusNoContent { + or, err = client.DeleteResponder(or.Response.Response) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersDeleteFuture", "Result", or.Response.Response, "Failure responding to request") + } + } + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/operations.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/operations.go new file mode 100644 index 000000000000..5c6f040f8b77 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/operations.go @@ -0,0 +1,147 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+
+import (
+	"context"
+	"github.com/Azure/go-autorest/autorest"
+	"github.com/Azure/go-autorest/autorest/azure"
+	"github.com/Azure/go-autorest/tracing"
+	"net/http"
+)
+
+// OperationsClient is the client for the Microsoft.DataShare Operations methods.
+type OperationsClient struct {
+	BaseClient
+}
+
+// NewOperationsClient creates an instance of the OperationsClient client.
+func NewOperationsClient(subscriptionID string) OperationsClient {
+	return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
+}
+
+// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client using a custom endpoint. Use this
+// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
+func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient {
+	return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)}
+}
+
+// List list of available operations
+func (client OperationsClient) List(ctx context.Context) (result OperationListPage, err error) {
+	if tracing.IsEnabled() {
+		ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
+		defer func() {
+			sc := -1
+			if result.ol.Response.Response != nil {
+				sc = result.ol.Response.Response.StatusCode
+			}
+			tracing.EndSpan(ctx, sc, err)
+		}()
+	}
+	result.fn = client.listNextResults
+	req, err := client.ListPreparer(ctx)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "datashare.OperationsClient", "List", nil, "Failure preparing request")
+		return
+	}
+
+	resp, err := client.ListSender(req)
+	if err != nil {
+		result.ol.Response = autorest.Response{Response: resp}
+		err = autorest.NewErrorWithError(err, "datashare.OperationsClient", "List", resp, "Failure sending request")
+		return
+	}
+
+	result.ol, err = client.ListResponder(resp)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "datashare.OperationsClient", "List", resp, "Failure responding to request")
+	}
+
+	return
+}
+
+// ListPreparer prepares the List request.
+func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) {
+	const APIVersion = "2019-11-01"
+	queryParameters := map[string]interface{}{
+		"api-version": APIVersion,
+	}
+
+	preparer := autorest.CreatePreparer(
+		autorest.AsGet(),
+		autorest.WithBaseURL(client.BaseURI),
+		autorest.WithPath("/providers/Microsoft.DataShare/operations"),
+		autorest.WithQueryParameters(queryParameters))
+	return preparer.Prepare((&http.Request{}).WithContext(ctx))
+}
+
+// ListSender sends the List request. The method will close the
+// http.Response Body if it receives an error.
+func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) {
+	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
+}
+
+// ListResponder handles the response to the List request. The method always
+// closes the http.Response Body.
+func (client OperationsClient) ListResponder(resp *http.Response) (result OperationList, err error) {
+	err = autorest.Respond(
+		resp,
+		client.ByInspecting(),
+		azure.WithErrorUnlessStatusCode(http.StatusOK),
+		autorest.ByUnmarshallingJSON(&result),
+		autorest.ByClosing())
+	result.Response = autorest.Response{Response: resp}
+	return
+}
+
+// listNextResults retrieves the next set of results, if any.
+func (client OperationsClient) listNextResults(ctx context.Context, lastResults OperationList) (result OperationList, err error) {
+	req, err := lastResults.operationListPreparer(ctx)
+	if err != nil {
+		return result, autorest.NewErrorWithError(err, "datashare.OperationsClient", "listNextResults", nil, "Failure preparing next results request")
+	}
+	if req == nil {
+		return
+	}
+	resp, err := client.ListSender(req)
+	if err != nil {
+		result.Response = autorest.Response{Response: resp}
+		return result, autorest.NewErrorWithError(err, "datashare.OperationsClient", "listNextResults", resp, "Failure sending next results request")
+	}
+	result, err = client.ListResponder(resp)
+	if err != nil {
+		err = autorest.NewErrorWithError(err, "datashare.OperationsClient", "listNextResults", resp, "Failure responding to next results request")
+	}
+	return
+}
+
+// ListComplete enumerates all values, automatically crossing page boundaries as required.
+func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListIterator, err error) {
+	if tracing.IsEnabled() {
+		ctx = tracing.StartSpan(ctx, fqdn+"/OperationsClient.List")
+		defer func() {
+			sc := -1
+			if result.Response().Response.Response != nil {
+				sc = result.page.Response().Response.Response.StatusCode
+			}
+			tracing.EndSpan(ctx, sc, err)
+		}()
+	}
+	result.page, err = client.List(ctx)
+	return
+}
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/providersharesubscriptions.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/providersharesubscriptions.go
new file mode 100644
index 000000000000..49babfc08edf
--- /dev/null
+++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/providersharesubscriptions.go
@@ -0,0 +1,403 @@
+package datashare
+
+// Copyright (c) Microsoft and contributors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// Code generated by Microsoft (R) AutoRest Code Generator.
+// Changes may cause incorrect behavior and will be lost if the code is regenerated.
+
+import (
+	"context"
+	"github.com/Azure/go-autorest/autorest"
+	"github.com/Azure/go-autorest/autorest/azure"
+	"github.com/Azure/go-autorest/tracing"
+	"net/http"
+)
+
+// ProviderShareSubscriptionsClient is the client for the Microsoft.DataShare ProviderShareSubscriptions methods.
+type ProviderShareSubscriptionsClient struct {
+	BaseClient
+}
+
+// NewProviderShareSubscriptionsClient creates an instance of the ProviderShareSubscriptionsClient client.
+func NewProviderShareSubscriptionsClient(subscriptionID string) ProviderShareSubscriptionsClient {
+	return NewProviderShareSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID)
+}
+
+// NewProviderShareSubscriptionsClientWithBaseURI creates an instance of the ProviderShareSubscriptionsClient client
+// using a custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign
+// clouds, Azure stack).
+func NewProviderShareSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) ProviderShareSubscriptionsClient { + return ProviderShareSubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// GetByShare get share subscription in a provider share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// providerShareSubscriptionID - to locate shareSubscription +func (client ProviderShareSubscriptionsClient) GetByShare(ctx context.Context, resourceGroupName string, accountName string, shareName string, providerShareSubscriptionID string) (result ProviderShareSubscription, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionsClient.GetByShare") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetBySharePreparer(ctx, resourceGroupName, accountName, shareName, providerShareSubscriptionID) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "GetByShare", nil, "Failure preparing request") + return + } + + resp, err := client.GetByShareSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "GetByShare", resp, "Failure sending request") + return + } + + result, err = client.GetByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "GetByShare", resp, "Failure responding to request") + } + + return +} + +// GetBySharePreparer prepares the GetByShare request. +func (client ProviderShareSubscriptionsClient) GetBySharePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, providerShareSubscriptionID string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "providerShareSubscriptionId": autorest.Encode("path", providerShareSubscriptionID), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetByShareSender sends the GetByShare request. The method will close the +// http.Response Body if it receives an error. +func (client ProviderShareSubscriptionsClient) GetByShareSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetByShareResponder handles the response to the GetByShare request. The method always +// closes the http.Response Body. 
+func (client ProviderShareSubscriptionsClient) GetByShareResponder(resp *http.Response) (result ProviderShareSubscription, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByShare list share subscriptions in a provider share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// skipToken - continuation Token +func (client ProviderShareSubscriptionsClient) ListByShare(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result ProviderShareSubscriptionListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionsClient.ListByShare") + defer func() { + sc := -1 + if result.pssl.Response.Response != nil { + sc = result.pssl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareNextResults + req, err := client.ListBySharePreparer(ctx, resourceGroupName, accountName, shareName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "ListByShare", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSender(req) + if err != nil { + result.pssl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "ListByShare", resp, "Failure sending request") + return + } + + result.pssl, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "ListByShare", resp, "Failure responding to request") + } + + return +} + +// ListBySharePreparer prepares the ListByShare request. +func (client ProviderShareSubscriptionsClient) ListBySharePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSender sends the ListByShare request. The method will close the +// http.Response Body if it receives an error. +func (client ProviderShareSubscriptionsClient) ListByShareSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareResponder handles the response to the ListByShare request. 
The method always +// closes the http.Response Body. +func (client ProviderShareSubscriptionsClient) ListByShareResponder(resp *http.Response) (result ProviderShareSubscriptionList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareNextResults retrieves the next set of results, if any. +func (client ProviderShareSubscriptionsClient) listByShareNextResults(ctx context.Context, lastResults ProviderShareSubscriptionList) (result ProviderShareSubscriptionList, err error) { + req, err := lastResults.providerShareSubscriptionListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "listByShareNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "listByShareNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "listByShareNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareComplete enumerates all values, automatically crossing page boundaries as required. +func (client ProviderShareSubscriptionsClient) ListByShareComplete(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result ProviderShareSubscriptionListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionsClient.ListByShare") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShare(ctx, resourceGroupName, accountName, shareName, skipToken) + return +} + +// Reinstate reinstate share subscription in a provider share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. 
+// providerShareSubscriptionID - to locate shareSubscription +func (client ProviderShareSubscriptionsClient) Reinstate(ctx context.Context, resourceGroupName string, accountName string, shareName string, providerShareSubscriptionID string) (result ProviderShareSubscription, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionsClient.Reinstate") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.ReinstatePreparer(ctx, resourceGroupName, accountName, shareName, providerShareSubscriptionID) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "Reinstate", nil, "Failure preparing request") + return + } + + resp, err := client.ReinstateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "Reinstate", resp, "Failure sending request") + return + } + + result, err = client.ReinstateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "Reinstate", resp, "Failure responding to request") + } + + return +} + +// ReinstatePreparer prepares the Reinstate request. +func (client ProviderShareSubscriptionsClient) ReinstatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, providerShareSubscriptionID string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "providerShareSubscriptionId": autorest.Encode("path", providerShareSubscriptionID), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/reinstate", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ReinstateSender sends the Reinstate request. The method will close the +// http.Response Body if it receives an error. +func (client ProviderShareSubscriptionsClient) ReinstateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ReinstateResponder handles the response to the Reinstate request. The method always +// closes the http.Response Body. +func (client ProviderShareSubscriptionsClient) ReinstateResponder(resp *http.Response) (result ProviderShareSubscription, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Revoke revoke share subscription in a provider share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. 
+// shareName - the name of the share. +// providerShareSubscriptionID - to locate shareSubscription +func (client ProviderShareSubscriptionsClient) Revoke(ctx context.Context, resourceGroupName string, accountName string, shareName string, providerShareSubscriptionID string) (result ProviderShareSubscriptionsRevokeFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ProviderShareSubscriptionsClient.Revoke") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.RevokePreparer(ctx, resourceGroupName, accountName, shareName, providerShareSubscriptionID) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "Revoke", nil, "Failure preparing request") + return + } + + result, err = client.RevokeSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ProviderShareSubscriptionsClient", "Revoke", result.Response(), "Failure sending request") + return + } + + return +} + +// RevokePreparer prepares the Revoke request. +func (client ProviderShareSubscriptionsClient) RevokePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, providerShareSubscriptionID string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "providerShareSubscriptionId": autorest.Encode("path", providerShareSubscriptionID), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/providerShareSubscriptions/{providerShareSubscriptionId}/revoke", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// RevokeSender sends the Revoke request. The method will close the +// http.Response Body if it receives an error. +func (client ProviderShareSubscriptionsClient) RevokeSender(req *http.Request) (future ProviderShareSubscriptionsRevokeFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// RevokeResponder handles the response to the Revoke request. The method always +// closes the http.Response Body. 
+func (client ProviderShareSubscriptionsClient) RevokeResponder(resp *http.Response) (result ProviderShareSubscription, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/shares.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/shares.go new file mode 100644 index 000000000000..cba977a74913 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/shares.go @@ -0,0 +1,641 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// SharesClient is the creates a Microsoft.DataShare management client. +type SharesClient struct { + BaseClient +} + +// NewSharesClient creates an instance of the SharesClient client. +func NewSharesClient(subscriptionID string) SharesClient { + return NewSharesClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewSharesClientWithBaseURI creates an instance of the SharesClient client using a custom endpoint. Use this when +// interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewSharesClientWithBaseURI(baseURI string, subscriptionID string) SharesClient { + return SharesClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. 
+// share - the share payload +func (client SharesClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareName string, share Share) (result Share, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.Create") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareName, share) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Create", nil, "Failure preparing request") + return + } + + resp, err := client.CreateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Create", resp, "Failure sending request") + return + } + + result, err = client.CreateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Create", resp, "Failure responding to request") + } + + return +} + +// CreatePreparer prepares the Create request. +func (client SharesClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, share Share) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}", pathParameters), + autorest.WithJSON(share), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client SharesClient) CreateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client SharesClient) CreateResponder(resp *http.Response) (result Share, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. 
+func (client SharesClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareName string) (result SharesDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client SharesClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client SharesClient) DeleteSender(req *http.Request) (future SharesDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client SharesClient) DeleteResponder(resp *http.Response) (result OperationResponse, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Get get a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share to retrieve. 
+func (client SharesClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareName string) (result Share, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client SharesClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client SharesClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client SharesClient) GetResponder(resp *http.Response) (result Share, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByAccount list shares in an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. 
+// skipToken - continuation Token +func (client SharesClient) ListByAccount(ctx context.Context, resourceGroupName string, accountName string, skipToken string) (result ShareListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.ListByAccount") + defer func() { + sc := -1 + if result.sl.Response.Response != nil { + sc = result.sl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByAccountNextResults + req, err := client.ListByAccountPreparer(ctx, resourceGroupName, accountName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListByAccount", nil, "Failure preparing request") + return + } + + resp, err := client.ListByAccountSender(req) + if err != nil { + result.sl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListByAccount", resp, "Failure sending request") + return + } + + result.sl, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListByAccount", resp, "Failure responding to request") + } + + return +} + +// ListByAccountPreparer prepares the ListByAccount request. +func (client SharesClient) ListByAccountPreparer(ctx context.Context, resourceGroupName string, accountName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByAccountSender sends the ListByAccount request. The method will close the +// http.Response Body if it receives an error. +func (client SharesClient) ListByAccountSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByAccountResponder handles the response to the ListByAccount request. The method always +// closes the http.Response Body. +func (client SharesClient) ListByAccountResponder(resp *http.Response) (result ShareList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByAccountNextResults retrieves the next set of results, if any. 
+func (client SharesClient) listByAccountNextResults(ctx context.Context, lastResults ShareList) (result ShareList, err error) { + req, err := lastResults.shareListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.SharesClient", "listByAccountNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByAccountSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.SharesClient", "listByAccountNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "listByAccountNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByAccountComplete enumerates all values, automatically crossing page boundaries as required. +func (client SharesClient) ListByAccountComplete(ctx context.Context, resourceGroupName string, accountName string, skipToken string) (result ShareListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.ListByAccount") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByAccount(ctx, resourceGroupName, accountName, skipToken) + return +} + +// ListSynchronizationDetails list synchronization details +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// shareSynchronization - share Synchronization payload. +// skipToken - continuation token +func (client SharesClient) ListSynchronizationDetails(ctx context.Context, resourceGroupName string, accountName string, shareName string, shareSynchronization ShareSynchronization, skipToken string) (result SynchronizationDetailsListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.ListSynchronizationDetails") + defer func() { + sc := -1 + if result.sdl.Response.Response != nil { + sc = result.sdl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listSynchronizationDetailsNextResults + req, err := client.ListSynchronizationDetailsPreparer(ctx, resourceGroupName, accountName, shareName, shareSynchronization, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListSynchronizationDetails", nil, "Failure preparing request") + return + } + + resp, err := client.ListSynchronizationDetailsSender(req) + if err != nil { + result.sdl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListSynchronizationDetails", resp, "Failure sending request") + return + } + + result.sdl, err = client.ListSynchronizationDetailsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListSynchronizationDetails", resp, "Failure responding to request") + } + + return +} + +// ListSynchronizationDetailsPreparer prepares the ListSynchronizationDetails request. 
+func (client SharesClient) ListSynchronizationDetailsPreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, shareSynchronization ShareSynchronization, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + shareSynchronization.SynchronizationMode = "" + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizationDetails", pathParameters), + autorest.WithJSON(shareSynchronization), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSynchronizationDetailsSender sends the ListSynchronizationDetails request. The method will close the +// http.Response Body if it receives an error. +func (client SharesClient) ListSynchronizationDetailsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListSynchronizationDetailsResponder handles the response to the ListSynchronizationDetails request. The method always +// closes the http.Response Body. +func (client SharesClient) ListSynchronizationDetailsResponder(resp *http.Response) (result SynchronizationDetailsList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listSynchronizationDetailsNextResults retrieves the next set of results, if any. +func (client SharesClient) listSynchronizationDetailsNextResults(ctx context.Context, lastResults SynchronizationDetailsList) (result SynchronizationDetailsList, err error) { + req, err := lastResults.synchronizationDetailsListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.SharesClient", "listSynchronizationDetailsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSynchronizationDetailsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.SharesClient", "listSynchronizationDetailsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListSynchronizationDetailsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "listSynchronizationDetailsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListSynchronizationDetailsComplete enumerates all values, automatically crossing page boundaries as required. 
+func (client SharesClient) ListSynchronizationDetailsComplete(ctx context.Context, resourceGroupName string, accountName string, shareName string, shareSynchronization ShareSynchronization, skipToken string) (result SynchronizationDetailsListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.ListSynchronizationDetails") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListSynchronizationDetails(ctx, resourceGroupName, accountName, shareName, shareSynchronization, skipToken) + return +} + +// ListSynchronizations list synchronizations of a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// skipToken - continuation token +func (client SharesClient) ListSynchronizations(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result ShareSynchronizationListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.ListSynchronizations") + defer func() { + sc := -1 + if result.ssl.Response.Response != nil { + sc = result.ssl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listSynchronizationsNextResults + req, err := client.ListSynchronizationsPreparer(ctx, resourceGroupName, accountName, shareName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListSynchronizations", nil, "Failure preparing request") + return + } + + resp, err := client.ListSynchronizationsSender(req) + if err != nil { + result.ssl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListSynchronizations", resp, "Failure sending request") + return + } + + result.ssl, err = client.ListSynchronizationsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "ListSynchronizations", resp, "Failure responding to request") + } + + return +} + +// ListSynchronizationsPreparer prepares the ListSynchronizations request. +func (client SharesClient) ListSynchronizationsPreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/listSynchronizations", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSynchronizationsSender sends the ListSynchronizations request. 
The method will close the +// http.Response Body if it receives an error. +func (client SharesClient) ListSynchronizationsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListSynchronizationsResponder handles the response to the ListSynchronizations request. The method always +// closes the http.Response Body. +func (client SharesClient) ListSynchronizationsResponder(resp *http.Response) (result ShareSynchronizationList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listSynchronizationsNextResults retrieves the next set of results, if any. +func (client SharesClient) listSynchronizationsNextResults(ctx context.Context, lastResults ShareSynchronizationList) (result ShareSynchronizationList, err error) { + req, err := lastResults.shareSynchronizationListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.SharesClient", "listSynchronizationsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSynchronizationsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.SharesClient", "listSynchronizationsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListSynchronizationsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SharesClient", "listSynchronizationsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListSynchronizationsComplete enumerates all values, automatically crossing page boundaries as required. +func (client SharesClient) ListSynchronizationsComplete(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result ShareSynchronizationListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SharesClient.ListSynchronizations") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListSynchronizations(ctx, resourceGroupName, accountName, shareName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/sharesubscriptions.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/sharesubscriptions.go new file mode 100644 index 000000000000..9ad7e7b42e63 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/sharesubscriptions.go @@ -0,0 +1,957 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/autorest/validation" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// ShareSubscriptionsClient is the creates a Microsoft.DataShare management client. +type ShareSubscriptionsClient struct { + BaseClient +} + +// NewShareSubscriptionsClient creates an instance of the ShareSubscriptionsClient client. +func NewShareSubscriptionsClient(subscriptionID string) ShareSubscriptionsClient { + return NewShareSubscriptionsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewShareSubscriptionsClientWithBaseURI creates an instance of the ShareSubscriptionsClient client using a custom +// endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure +// stack). +func NewShareSubscriptionsClientWithBaseURI(baseURI string, subscriptionID string) ShareSubscriptionsClient { + return ShareSubscriptionsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// CancelSynchronization request to cancel a synchronization. +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +// shareSubscriptionSynchronization - share Subscription Synchronization payload. +func (client ShareSubscriptionsClient) CancelSynchronization(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscriptionSynchronization ShareSubscriptionSynchronization) (result ShareSubscriptionsCancelSynchronizationFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.CancelSynchronization") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: shareSubscriptionSynchronization, + Constraints: []validation.Constraint{{Target: "shareSubscriptionSynchronization.SynchronizationID", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { + return result, validation.NewError("datashare.ShareSubscriptionsClient", "CancelSynchronization", err.Error()) + } + + req, err := client.CancelSynchronizationPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, shareSubscriptionSynchronization) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "CancelSynchronization", nil, "Failure preparing request") + return + } + + result, err = client.CancelSynchronizationSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "CancelSynchronization", result.Response(), "Failure sending request") + return + } + + return +} + +// CancelSynchronizationPreparer prepares the CancelSynchronization request. 
+func (client ShareSubscriptionsClient) CancelSynchronizationPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscriptionSynchronization ShareSubscriptionSynchronization) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + shareSubscriptionSynchronization.DurationMs = nil + shareSubscriptionSynchronization.EndTime = nil + shareSubscriptionSynchronization.Message = nil + shareSubscriptionSynchronization.StartTime = nil + shareSubscriptionSynchronization.Status = nil + shareSubscriptionSynchronization.SynchronizationMode = "" + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/cancelSynchronization", pathParameters), + autorest.WithJSON(shareSubscriptionSynchronization), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CancelSynchronizationSender sends the CancelSynchronization request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) CancelSynchronizationSender(req *http.Request) (future ShareSubscriptionsCancelSynchronizationFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CancelSynchronizationResponder handles the response to the CancelSynchronization request. The method always +// closes the http.Response Body. +func (client ShareSubscriptionsClient) CancelSynchronizationResponder(resp *http.Response) (result ShareSubscriptionSynchronization, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Create create a shareSubscription in an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. 
+// shareSubscription - create parameters for shareSubscription +func (client ShareSubscriptionsClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscription ShareSubscription) (result ShareSubscription, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.Create") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: shareSubscription, + Constraints: []validation.Constraint{{Target: "shareSubscription.ShareSubscriptionProperties", Name: validation.Null, Rule: true, + Chain: []validation.Constraint{{Target: "shareSubscription.ShareSubscriptionProperties.InvitationID", Name: validation.Null, Rule: true, Chain: nil}, + {Target: "shareSubscription.ShareSubscriptionProperties.SourceShareLocation", Name: validation.Null, Rule: true, Chain: nil}, + }}}}}); err != nil { + return result, validation.NewError("datashare.ShareSubscriptionsClient", "Create", err.Error()) + } + + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, shareSubscription) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Create", nil, "Failure preparing request") + return + } + + resp, err := client.CreateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Create", resp, "Failure sending request") + return + } + + result, err = client.CreateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Create", resp, "Failure responding to request") + } + + return +} + +// CreatePreparer prepares the Create request. +func (client ShareSubscriptionsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscription ShareSubscription) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}", pathParameters), + autorest.WithJSON(shareSubscription), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) CreateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) CreateResponder(resp *http.Response) (result ShareSubscription, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete a shareSubscription in an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +func (client ShareSubscriptionsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string) (result ShareSubscriptionsDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareSubscriptionName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client ShareSubscriptionsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) DeleteSender(req *http.Request) (future ShareSubscriptionsDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) DeleteResponder(resp *http.Response) (result OperationResponse, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Get get a shareSubscription in an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +func (client ShareSubscriptionsClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string) (result ShareSubscription, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client ShareSubscriptionsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) GetResponder(resp *http.Response) (result ShareSubscription, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByAccount list share subscriptions in an account +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// skipToken - continuation Token +func (client ShareSubscriptionsClient) ListByAccount(ctx context.Context, resourceGroupName string, accountName string, skipToken string) (result ShareSubscriptionListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListByAccount") + defer func() { + sc := -1 + if result.ssl.Response.Response != nil { + sc = result.ssl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByAccountNextResults + req, err := client.ListByAccountPreparer(ctx, resourceGroupName, accountName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListByAccount", nil, "Failure preparing request") + return + } + + resp, err := client.ListByAccountSender(req) + if err != nil { + result.ssl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListByAccount", resp, "Failure sending request") + return + } + + result.ssl, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListByAccount", resp, "Failure responding to request") + } + + return +} + +// ListByAccountPreparer prepares the ListByAccount request. +func (client ShareSubscriptionsClient) ListByAccountPreparer(ctx context.Context, resourceGroupName string, accountName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByAccountSender sends the ListByAccount request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) ListByAccountSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByAccountResponder handles the response to the ListByAccount request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) ListByAccountResponder(resp *http.Response) (result ShareSubscriptionList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByAccountNextResults retrieves the next set of results, if any. +func (client ShareSubscriptionsClient) listByAccountNextResults(ctx context.Context, lastResults ShareSubscriptionList) (result ShareSubscriptionList, err error) { + req, err := lastResults.shareSubscriptionListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listByAccountNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByAccountSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listByAccountNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByAccountResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listByAccountNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByAccountComplete enumerates all values, automatically crossing page boundaries as required. +func (client ShareSubscriptionsClient) ListByAccountComplete(ctx context.Context, resourceGroupName string, accountName string, skipToken string) (result ShareSubscriptionListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListByAccount") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByAccount(ctx, resourceGroupName, accountName, skipToken) + return +} + +// ListSourceShareSynchronizationSettings get synchronization settings set on a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. 
+// skipToken - continuation token +func (client ShareSubscriptionsClient) ListSourceShareSynchronizationSettings(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result SourceShareSynchronizationSettingListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListSourceShareSynchronizationSettings") + defer func() { + sc := -1 + if result.ssssl.Response.Response != nil { + sc = result.ssssl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listSourceShareSynchronizationSettingsNextResults + req, err := client.ListSourceShareSynchronizationSettingsPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSourceShareSynchronizationSettings", nil, "Failure preparing request") + return + } + + resp, err := client.ListSourceShareSynchronizationSettingsSender(req) + if err != nil { + result.ssssl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSourceShareSynchronizationSettings", resp, "Failure sending request") + return + } + + result.ssssl, err = client.ListSourceShareSynchronizationSettingsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSourceShareSynchronizationSettings", resp, "Failure responding to request") + } + + return +} + +// ListSourceShareSynchronizationSettingsPreparer prepares the ListSourceShareSynchronizationSettings request. +func (client ShareSubscriptionsClient) ListSourceShareSynchronizationSettingsPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSourceShareSynchronizationSettings", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSourceShareSynchronizationSettingsSender sends the ListSourceShareSynchronizationSettings request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) ListSourceShareSynchronizationSettingsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListSourceShareSynchronizationSettingsResponder handles the response to the ListSourceShareSynchronizationSettings request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) ListSourceShareSynchronizationSettingsResponder(resp *http.Response) (result SourceShareSynchronizationSettingList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listSourceShareSynchronizationSettingsNextResults retrieves the next set of results, if any. +func (client ShareSubscriptionsClient) listSourceShareSynchronizationSettingsNextResults(ctx context.Context, lastResults SourceShareSynchronizationSettingList) (result SourceShareSynchronizationSettingList, err error) { + req, err := lastResults.sourceShareSynchronizationSettingListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSourceShareSynchronizationSettingsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSourceShareSynchronizationSettingsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSourceShareSynchronizationSettingsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListSourceShareSynchronizationSettingsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSourceShareSynchronizationSettingsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListSourceShareSynchronizationSettingsComplete enumerates all values, automatically crossing page boundaries as required. +func (client ShareSubscriptionsClient) ListSourceShareSynchronizationSettingsComplete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result SourceShareSynchronizationSettingListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListSourceShareSynchronizationSettings") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListSourceShareSynchronizationSettings(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + return +} + +// ListSynchronizationDetails list synchronization details +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the share subscription. +// shareSubscriptionSynchronization - share Subscription Synchronization payload. 
+// skipToken - continuation token +func (client ShareSubscriptionsClient) ListSynchronizationDetails(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscriptionSynchronization ShareSubscriptionSynchronization, skipToken string) (result SynchronizationDetailsListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListSynchronizationDetails") + defer func() { + sc := -1 + if result.sdl.Response.Response != nil { + sc = result.sdl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + if err := validation.Validate([]validation.Validation{ + {TargetValue: shareSubscriptionSynchronization, + Constraints: []validation.Constraint{{Target: "shareSubscriptionSynchronization.SynchronizationID", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil { + return result, validation.NewError("datashare.ShareSubscriptionsClient", "ListSynchronizationDetails", err.Error()) + } + + result.fn = client.listSynchronizationDetailsNextResults + req, err := client.ListSynchronizationDetailsPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, shareSubscriptionSynchronization, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSynchronizationDetails", nil, "Failure preparing request") + return + } + + resp, err := client.ListSynchronizationDetailsSender(req) + if err != nil { + result.sdl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSynchronizationDetails", resp, "Failure sending request") + return + } + + result.sdl, err = client.ListSynchronizationDetailsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSynchronizationDetails", resp, "Failure responding to request") + } + + return +} + +// ListSynchronizationDetailsPreparer prepares the ListSynchronizationDetails request. 
+func (client ShareSubscriptionsClient) ListSynchronizationDetailsPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscriptionSynchronization ShareSubscriptionSynchronization, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + shareSubscriptionSynchronization.DurationMs = nil + shareSubscriptionSynchronization.EndTime = nil + shareSubscriptionSynchronization.Message = nil + shareSubscriptionSynchronization.StartTime = nil + shareSubscriptionSynchronization.Status = nil + shareSubscriptionSynchronization.SynchronizationMode = "" + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizationDetails", pathParameters), + autorest.WithJSON(shareSubscriptionSynchronization), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSynchronizationDetailsSender sends the ListSynchronizationDetails request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) ListSynchronizationDetailsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListSynchronizationDetailsResponder handles the response to the ListSynchronizationDetails request. The method always +// closes the http.Response Body. +func (client ShareSubscriptionsClient) ListSynchronizationDetailsResponder(resp *http.Response) (result SynchronizationDetailsList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listSynchronizationDetailsNextResults retrieves the next set of results, if any. 
+func (client ShareSubscriptionsClient) listSynchronizationDetailsNextResults(ctx context.Context, lastResults SynchronizationDetailsList) (result SynchronizationDetailsList, err error) { + req, err := lastResults.synchronizationDetailsListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSynchronizationDetailsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSynchronizationDetailsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSynchronizationDetailsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListSynchronizationDetailsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSynchronizationDetailsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListSynchronizationDetailsComplete enumerates all values, automatically crossing page boundaries as required. +func (client ShareSubscriptionsClient) ListSynchronizationDetailsComplete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, shareSubscriptionSynchronization ShareSubscriptionSynchronization, skipToken string) (result SynchronizationDetailsListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListSynchronizationDetails") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListSynchronizationDetails(ctx, resourceGroupName, accountName, shareSubscriptionName, shareSubscriptionSynchronization, skipToken) + return +} + +// ListSynchronizations list synchronizations of a share subscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the share subscription. 
+// skipToken - continuation token +func (client ShareSubscriptionsClient) ListSynchronizations(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result ShareSubscriptionSynchronizationListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListSynchronizations") + defer func() { + sc := -1 + if result.sssl.Response.Response != nil { + sc = result.sssl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listSynchronizationsNextResults + req, err := client.ListSynchronizationsPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSynchronizations", nil, "Failure preparing request") + return + } + + resp, err := client.ListSynchronizationsSender(req) + if err != nil { + result.sssl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSynchronizations", resp, "Failure sending request") + return + } + + result.sssl, err = client.ListSynchronizationsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "ListSynchronizations", resp, "Failure responding to request") + } + + return +} + +// ListSynchronizationsPreparer prepares the ListSynchronizations request. +func (client ShareSubscriptionsClient) ListSynchronizationsPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/listSynchronizations", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListSynchronizationsSender sends the ListSynchronizations request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) ListSynchronizationsSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListSynchronizationsResponder handles the response to the ListSynchronizations request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) ListSynchronizationsResponder(resp *http.Response) (result ShareSubscriptionSynchronizationList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listSynchronizationsNextResults retrieves the next set of results, if any. +func (client ShareSubscriptionsClient) listSynchronizationsNextResults(ctx context.Context, lastResults ShareSubscriptionSynchronizationList) (result ShareSubscriptionSynchronizationList, err error) { + req, err := lastResults.shareSubscriptionSynchronizationListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSynchronizationsNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListSynchronizationsSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSynchronizationsNextResults", resp, "Failure sending next results request") + } + result, err = client.ListSynchronizationsResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "listSynchronizationsNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListSynchronizationsComplete enumerates all values, automatically crossing page boundaries as required. +func (client ShareSubscriptionsClient) ListSynchronizationsComplete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result ShareSubscriptionSynchronizationListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.ListSynchronizations") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListSynchronizations(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + return +} + +// SynchronizeMethod initiate a copy +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. 
+// shareSubscriptionName - the name of share subscription +// synchronize - synchronize payload +func (client ShareSubscriptionsClient) SynchronizeMethod(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, synchronize Synchronize) (result ShareSubscriptionsSynchronizeMethodFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/ShareSubscriptionsClient.SynchronizeMethod") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.SynchronizeMethodPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, synchronize) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "SynchronizeMethod", nil, "Failure preparing request") + return + } + + result, err = client.SynchronizeMethodSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.ShareSubscriptionsClient", "SynchronizeMethod", result.Response(), "Failure sending request") + return + } + + return +} + +// SynchronizeMethodPreparer prepares the SynchronizeMethod request. +func (client ShareSubscriptionsClient) SynchronizeMethodPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, synchronize Synchronize) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPost(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/Synchronize", pathParameters), + autorest.WithJSON(synchronize), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// SynchronizeMethodSender sends the SynchronizeMethod request. The method will close the +// http.Response Body if it receives an error. +func (client ShareSubscriptionsClient) SynchronizeMethodSender(req *http.Request) (future ShareSubscriptionsSynchronizeMethodFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// SynchronizeMethodResponder handles the response to the SynchronizeMethod request. The method always +// closes the http.Response Body. 
+func (client ShareSubscriptionsClient) SynchronizeMethodResponder(resp *http.Response) (result ShareSubscriptionSynchronization, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/synchronizationsettings.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/synchronizationsettings.go new file mode 100644 index 000000000000..0223d3e28ef3 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/synchronizationsettings.go @@ -0,0 +1,406 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// SynchronizationSettingsClient is the creates a Microsoft.DataShare management client. +type SynchronizationSettingsClient struct { + BaseClient +} + +// NewSynchronizationSettingsClient creates an instance of the SynchronizationSettingsClient client. +func NewSynchronizationSettingsClient(subscriptionID string) SynchronizationSettingsClient { + return NewSynchronizationSettingsClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewSynchronizationSettingsClientWithBaseURI creates an instance of the SynchronizationSettingsClient client using a +// custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, +// Azure stack). +func NewSynchronizationSettingsClientWithBaseURI(baseURI string, subscriptionID string) SynchronizationSettingsClient { + return SynchronizationSettingsClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create or update a synchronizationSetting +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share to add the synchronization setting to. +// synchronizationSettingName - the name of the synchronizationSetting. +// synchronizationSetting - the new synchronization setting information. 
+func (client SynchronizationSettingsClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareName string, synchronizationSettingName string, synchronizationSetting BasicSynchronizationSetting) (result SynchronizationSettingModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingsClient.Create") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareName, synchronizationSettingName, synchronizationSetting) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Create", nil, "Failure preparing request") + return + } + + resp, err := client.CreateSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Create", resp, "Failure sending request") + return + } + + result, err = client.CreateResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Create", resp, "Failure responding to request") + } + + return +} + +// CreatePreparer prepares the Create request. +func (client SynchronizationSettingsClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, synchronizationSettingName string, synchronizationSetting BasicSynchronizationSetting) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "synchronizationSettingName": autorest.Encode("path", synchronizationSettingName), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}", pathParameters), + autorest.WithJSON(synchronizationSetting), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. +func (client SynchronizationSettingsClient) CreateSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. 
+func (client SynchronizationSettingsClient) CreateResponder(resp *http.Response) (result SynchronizationSettingModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete a synchronizationSetting in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// synchronizationSettingName - the name of the synchronizationSetting . +func (client SynchronizationSettingsClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareName string, synchronizationSettingName string) (result SynchronizationSettingsDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingsClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareName, synchronizationSettingName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client SynchronizationSettingsClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, synchronizationSettingName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "synchronizationSettingName": autorest.Encode("path", synchronizationSettingName), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. +func (client SynchronizationSettingsClient) DeleteSender(req *http.Request) (future SynchronizationSettingsDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. 
+func (client SynchronizationSettingsClient) DeleteResponder(resp *http.Response) (result OperationResponse, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Get get a synchronizationSetting in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// synchronizationSettingName - the name of the synchronizationSetting. +func (client SynchronizationSettingsClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareName string, synchronizationSettingName string) (result SynchronizationSettingModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingsClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareName, synchronizationSettingName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client SynchronizationSettingsClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, synchronizationSettingName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "synchronizationSettingName": autorest.Encode("path", synchronizationSettingName), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings/{synchronizationSettingName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. +func (client SynchronizationSettingsClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. 
+func (client SynchronizationSettingsClient) GetResponder(resp *http.Response) (result SynchronizationSettingModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByShare list synchronizationSettings in a share +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareName - the name of the share. +// skipToken - continuation token +func (client SynchronizationSettingsClient) ListByShare(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result SynchronizationSettingListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingsClient.ListByShare") + defer func() { + sc := -1 + if result.ssl.Response.Response != nil { + sc = result.ssl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareNextResults + req, err := client.ListBySharePreparer(ctx, resourceGroupName, accountName, shareName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "ListByShare", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSender(req) + if err != nil { + result.ssl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "ListByShare", resp, "Failure sending request") + return + } + + result.ssl, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "ListByShare", resp, "Failure responding to request") + } + + return +} + +// ListBySharePreparer prepares the ListByShare request. +func (client SynchronizationSettingsClient) ListBySharePreparer(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareName": autorest.Encode("path", shareName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shares/{shareName}/synchronizationSettings", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSender sends the ListByShare request. The method will close the +// http.Response Body if it receives an error. +func (client SynchronizationSettingsClient) ListByShareSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareResponder handles the response to the ListByShare request. The method always +// closes the http.Response Body. 
+func (client SynchronizationSettingsClient) ListByShareResponder(resp *http.Response) (result SynchronizationSettingList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareNextResults retrieves the next set of results, if any. +func (client SynchronizationSettingsClient) listByShareNextResults(ctx context.Context, lastResults SynchronizationSettingList) (result SynchronizationSettingList, err error) { + req, err := lastResults.synchronizationSettingListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "listByShareNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "listByShareNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.SynchronizationSettingsClient", "listByShareNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareComplete enumerates all values, automatically crossing page boundaries as required. +func (client SynchronizationSettingsClient) ListByShareComplete(ctx context.Context, resourceGroupName string, accountName string, shareName string, skipToken string) (result SynchronizationSettingListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/SynchronizationSettingsClient.ListByShare") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShare(ctx, resourceGroupName, accountName, shareName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/triggers.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/triggers.go new file mode 100644 index 000000000000..49324c58b8b0 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/triggers.go @@ -0,0 +1,405 @@ +package datashare + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ +import ( + "context" + "github.com/Azure/go-autorest/autorest" + "github.com/Azure/go-autorest/autorest/azure" + "github.com/Azure/go-autorest/tracing" + "net/http" +) + +// TriggersClient is the creates a Microsoft.DataShare management client. +type TriggersClient struct { + BaseClient +} + +// NewTriggersClient creates an instance of the TriggersClient client. +func NewTriggersClient(subscriptionID string) TriggersClient { + return NewTriggersClientWithBaseURI(DefaultBaseURI, subscriptionID) +} + +// NewTriggersClientWithBaseURI creates an instance of the TriggersClient client using a custom endpoint. Use this +// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack). +func NewTriggersClientWithBaseURI(baseURI string, subscriptionID string) TriggersClient { + return TriggersClient{NewWithBaseURI(baseURI, subscriptionID)} +} + +// Create create a Trigger +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the share subscription which will hold the data set sink. +// triggerName - the name of the trigger. +// trigger - trigger details. +func (client TriggersClient) Create(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, triggerName string, trigger BasicTrigger) (result TriggersCreateFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggersClient.Create") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.CreatePreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, triggerName, trigger) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Create", nil, "Failure preparing request") + return + } + + result, err = client.CreateSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Create", result.Response(), "Failure sending request") + return + } + + return +} + +// CreatePreparer prepares the Create request. +func (client TriggersClient) CreatePreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, triggerName string, trigger BasicTrigger) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "triggerName": autorest.Encode("path", triggerName), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsContentType("application/json; charset=utf-8"), + autorest.AsPut(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}", pathParameters), + autorest.WithJSON(trigger), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// CreateSender sends the Create request. The method will close the +// http.Response Body if it receives an error. 
+func (client TriggersClient) CreateSender(req *http.Request) (future TriggersCreateFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// CreateResponder handles the response to the Create request. The method always +// closes the http.Response Body. +func (client TriggersClient) CreateResponder(resp *http.Response) (result TriggerModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Delete delete a Trigger in a shareSubscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +// triggerName - the name of the trigger. +func (client TriggersClient) Delete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, triggerName string) (result TriggersDeleteFuture, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggersClient.Delete") + defer func() { + sc := -1 + if result.Response() != nil { + sc = result.Response().StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.DeletePreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, triggerName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Delete", nil, "Failure preparing request") + return + } + + result, err = client.DeleteSender(req) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Delete", result.Response(), "Failure sending request") + return + } + + return +} + +// DeletePreparer prepares the Delete request. +func (client TriggersClient) DeletePreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, triggerName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "triggerName": autorest.Encode("path", triggerName), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsDelete(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// DeleteSender sends the Delete request. The method will close the +// http.Response Body if it receives an error. 
+func (client TriggersClient) DeleteSender(req *http.Request) (future TriggersDeleteFuture, err error) { + var resp *http.Response + resp, err = client.Send(req, azure.DoRetryWithRegistration(client.Client)) + if err != nil { + return + } + future.Future, err = azure.NewFutureFromResponse(resp) + return +} + +// DeleteResponder handles the response to the Delete request. The method always +// closes the http.Response Body. +func (client TriggersClient) DeleteResponder(resp *http.Response) (result OperationResponse, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// Get get a Trigger in a shareSubscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the shareSubscription. +// triggerName - the name of the trigger. +func (client TriggersClient) Get(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, triggerName string) (result TriggerModel, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggersClient.Get") + defer func() { + sc := -1 + if result.Response.Response != nil { + sc = result.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + req, err := client.GetPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, triggerName) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Get", nil, "Failure preparing request") + return + } + + resp, err := client.GetSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Get", resp, "Failure sending request") + return + } + + result, err = client.GetResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "Get", resp, "Failure responding to request") + } + + return +} + +// GetPreparer prepares the Get request. +func (client TriggersClient) GetPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, triggerName string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + "triggerName": autorest.Encode("path", triggerName), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers/{triggerName}", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// GetSender sends the Get request. The method will close the +// http.Response Body if it receives an error. 
+func (client TriggersClient) GetSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// GetResponder handles the response to the Get request. The method always +// closes the http.Response Body. +func (client TriggersClient) GetResponder(resp *http.Response) (result TriggerModel, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// ListByShareSubscription list Triggers in a share subscription +// Parameters: +// resourceGroupName - the resource group name. +// accountName - the name of the share account. +// shareSubscriptionName - the name of the share subscription. +// skipToken - continuation token +func (client TriggersClient) ListByShareSubscription(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result TriggerListPage, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggersClient.ListByShareSubscription") + defer func() { + sc := -1 + if result.tl.Response.Response != nil { + sc = result.tl.Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.fn = client.listByShareSubscriptionNextResults + req, err := client.ListByShareSubscriptionPreparer(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "ListByShareSubscription", nil, "Failure preparing request") + return + } + + resp, err := client.ListByShareSubscriptionSender(req) + if err != nil { + result.tl.Response = autorest.Response{Response: resp} + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "ListByShareSubscription", resp, "Failure sending request") + return + } + + result.tl, err = client.ListByShareSubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "ListByShareSubscription", resp, "Failure responding to request") + } + + return +} + +// ListByShareSubscriptionPreparer prepares the ListByShareSubscription request. +func (client TriggersClient) ListByShareSubscriptionPreparer(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (*http.Request, error) { + pathParameters := map[string]interface{}{ + "accountName": autorest.Encode("path", accountName), + "resourceGroupName": autorest.Encode("path", resourceGroupName), + "shareSubscriptionName": autorest.Encode("path", shareSubscriptionName), + "subscriptionId": autorest.Encode("path", client.SubscriptionID), + } + + const APIVersion = "2019-11-01" + queryParameters := map[string]interface{}{ + "api-version": APIVersion, + } + if len(skipToken) > 0 { + queryParameters["$skipToken"] = autorest.Encode("query", skipToken) + } + + preparer := autorest.CreatePreparer( + autorest.AsGet(), + autorest.WithBaseURL(client.BaseURI), + autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataShare/accounts/{accountName}/shareSubscriptions/{shareSubscriptionName}/triggers", pathParameters), + autorest.WithQueryParameters(queryParameters)) + return preparer.Prepare((&http.Request{}).WithContext(ctx)) +} + +// ListByShareSubscriptionSender sends the ListByShareSubscription request. 
The method will close the +// http.Response Body if it receives an error. +func (client TriggersClient) ListByShareSubscriptionSender(req *http.Request) (*http.Response, error) { + return client.Send(req, azure.DoRetryWithRegistration(client.Client)) +} + +// ListByShareSubscriptionResponder handles the response to the ListByShareSubscription request. The method always +// closes the http.Response Body. +func (client TriggersClient) ListByShareSubscriptionResponder(resp *http.Response) (result TriggerList, err error) { + err = autorest.Respond( + resp, + client.ByInspecting(), + azure.WithErrorUnlessStatusCode(http.StatusOK), + autorest.ByUnmarshallingJSON(&result), + autorest.ByClosing()) + result.Response = autorest.Response{Response: resp} + return +} + +// listByShareSubscriptionNextResults retrieves the next set of results, if any. +func (client TriggersClient) listByShareSubscriptionNextResults(ctx context.Context, lastResults TriggerList) (result TriggerList, err error) { + req, err := lastResults.triggerListPreparer(ctx) + if err != nil { + return result, autorest.NewErrorWithError(err, "datashare.TriggersClient", "listByShareSubscriptionNextResults", nil, "Failure preparing next results request") + } + if req == nil { + return + } + resp, err := client.ListByShareSubscriptionSender(req) + if err != nil { + result.Response = autorest.Response{Response: resp} + return result, autorest.NewErrorWithError(err, "datashare.TriggersClient", "listByShareSubscriptionNextResults", resp, "Failure sending next results request") + } + result, err = client.ListByShareSubscriptionResponder(resp) + if err != nil { + err = autorest.NewErrorWithError(err, "datashare.TriggersClient", "listByShareSubscriptionNextResults", resp, "Failure responding to next results request") + } + return +} + +// ListByShareSubscriptionComplete enumerates all values, automatically crossing page boundaries as required. +func (client TriggersClient) ListByShareSubscriptionComplete(ctx context.Context, resourceGroupName string, accountName string, shareSubscriptionName string, skipToken string) (result TriggerListIterator, err error) { + if tracing.IsEnabled() { + ctx = tracing.StartSpan(ctx, fqdn+"/TriggersClient.ListByShareSubscription") + defer func() { + sc := -1 + if result.Response().Response.Response != nil { + sc = result.page.Response().Response.Response.StatusCode + } + tracing.EndSpan(ctx, sc, err) + }() + } + result.page, err = client.ListByShareSubscription(ctx, resourceGroupName, accountName, shareSubscriptionName, skipToken) + return +} diff --git a/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/version.go b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/version.go new file mode 100644 index 000000000000..5a612e2b6209 --- /dev/null +++ b/vendor/github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare/version.go @@ -0,0 +1,30 @@ +package datashare + +import "github.com/Azure/azure-sdk-for-go/version" + +// Copyright (c) Microsoft and contributors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// +// See the License for the specific language governing permissions and +// limitations under the License. +// +// Code generated by Microsoft (R) AutoRest Code Generator. +// Changes may cause incorrect behavior and will be lost if the code is regenerated. + +// UserAgent returns the UserAgent string to use when sending http.Requests. +func UserAgent() string { + return "Azure-SDK-For-Go/" + version.Number + " datashare/2019-11-01" +} + +// Version returns the semantic version (see http://semver.org) of the client. +func Version() string { + return version.Number +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 06ff64d2f3ed..779961db7630 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -28,6 +28,7 @@ github.com/Azure/azure-sdk-for-go/services/datalake/analytics/mgmt/2016-11-01/ac github.com/Azure/azure-sdk-for-go/services/datalake/store/2016-11-01/filesystem github.com/Azure/azure-sdk-for-go/services/datalake/store/mgmt/2016-11-01/account github.com/Azure/azure-sdk-for-go/services/datamigration/mgmt/2018-04-19/datamigration +github.com/Azure/azure-sdk-for-go/services/datashare/mgmt/2019-11-01/datashare github.com/Azure/azure-sdk-for-go/services/devspaces/mgmt/2019-04-01/devspaces github.com/Azure/azure-sdk-for-go/services/devtestlabs/mgmt/2016-05-15/dtl github.com/Azure/azure-sdk-for-go/services/dns/mgmt/2018-05-01/dns diff --git a/website/allowed-subcategories b/website/allowed-subcategories index 2b5cf6a9b9e4..ab3278a2fa42 100644 --- a/website/allowed-subcategories +++ b/website/allowed-subcategories @@ -19,6 +19,7 @@ DNS Data Explorer Data Factory Data Lake +Data Share Database Database Migration Databricks diff --git a/website/azurerm.erb b/website/azurerm.erb index 4e6139df56be..6c5a4240f2f0 100644 --- a/website/azurerm.erb +++ b/website/azurerm.erb @@ -186,6 +186,10 @@ azurerm_data_lake_store +
+            azurerm_data_share_account
+
             azurerm_dedicated_host
@@ -1411,6 +1415,16 @@
+            Data Share Resources
+
             DevSpace Resources