diff --git a/.changelog/6680.txt b/.changelog/6680.txt
deleted file mode 100644
index 72ca889a60..0000000000
--- a/.changelog/6680.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:enhancement
-bigquery: supported authorized routines on resource `bigquery_dataset` and `bigquery_dataset_access`
-```
diff --git a/google/iam_bigquery_dataset.go b/google/iam_bigquery_dataset.go
index e3375d9e9c..2ad13d7ddb 100644
--- a/google/iam_bigquery_dataset.go
+++ b/google/iam_bigquery_dataset.go
@@ -242,10 +242,6 @@ func accessToIamMember(access map[string]interface{}) (string, error) {
 		// dataset does not map to an IAM member, use access instead
 		return "", fmt.Errorf("Failed to convert BigQuery Dataset access to IAM member. To use views with a dataset, please use dataset_access")
 	}
-	if _, ok := access["routine"]; ok {
-		// dataset does not map to an IAM member, use access instead
-		return "", fmt.Errorf("Failed to convert BigQuery Dataset access to IAM member. To use views with a dataset, please use dataset_access")
-	}
 	if member, ok := access["userByEmail"]; ok {
 		// service accounts have "gservice" in their email. This is best guess due to lost information
 		if strings.Contains(member.(string), "gserviceaccount") {
diff --git a/google/resource_bigquery_dataset.go b/google/resource_bigquery_dataset.go
index ed8b3107f7..d1912477c1 100644
--- a/google/resource_bigquery_dataset.go
+++ b/google/resource_bigquery_dataset.go
@@ -286,37 +286,6 @@ are supported. Predefined roles that have equivalent basic roles
 are swapped by the API to their basic counterparts. See
 [official docs](https://cloud.google.com/bigquery/docs/access-control).`,
 						},
-						"routine": {
-							Type: schema.TypeList,
-							Optional: true,
-							Description: `A routine from a different dataset to grant access to. Queries
-executed against that routine will have read access to tables in
-this dataset. The role field is not required when this field is
-set. If that routine is updated by any user, access to the routine
-needs to be granted again via an update operation.`,
-							MaxItems: 1,
-							Elem: &schema.Resource{
-								Schema: map[string]*schema.Schema{
-									"dataset_id": {
-										Type: schema.TypeString,
-										Required: true,
-										Description: `The ID of the dataset containing this table.`,
-									},
-									"project_id": {
-										Type: schema.TypeString,
-										Required: true,
-										Description: `The ID of the project containing this table.`,
-									},
-									"routine_id": {
-										Type: schema.TypeString,
-										Required: true,
-										Description: `The ID of the routine. The ID must contain only letters (a-z,
-A-Z), numbers (0-9), or underscores (_). The maximum length
-is 256 characters.`,
-									},
-								},
-							},
-						},
 						"special_group": {
 							Type: schema.TypeString,
 							Optional: true,
@@ -764,7 +733,6 @@ func flattenBigQueryDatasetAccess(v interface{}, d *schema.ResourceData, config
 			"user_by_email": flattenBigQueryDatasetAccessUserByEmail(original["userByEmail"], d, config),
 			"view": flattenBigQueryDatasetAccessView(original["view"], d, config),
 			"dataset": flattenBigQueryDatasetAccessDataset(original["dataset"], d, config),
-			"routine": flattenBigQueryDatasetAccessRoutine(original["routine"], d, config),
 		})
 	}
 	return transformed
@@ -860,35 +828,6 @@ func flattenBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d *schema.Res
 	return v
 }
 
-func flattenBigQueryDatasetAccessRoutine(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	if v == nil {
-		return nil
-	}
-	original := v.(map[string]interface{})
-	if len(original) == 0 {
-		return nil
-	}
-	transformed := make(map[string]interface{})
-	transformed["dataset_id"] =
-		flattenBigQueryDatasetAccessRoutineDatasetId(original["datasetId"], d, config)
-	transformed["project_id"] =
-		flattenBigQueryDatasetAccessRoutineProjectId(original["projectId"], d, config)
-	transformed["routine_id"] =
-		flattenBigQueryDatasetAccessRoutineRoutineId(original["routineId"], d, config)
-	return []interface{}{transformed}
-}
-func flattenBigQueryDatasetAccessRoutineDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	return v
-}
-
-func flattenBigQueryDatasetAccessRoutineProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	return v
-}
-
-func flattenBigQueryDatasetAccessRoutineRoutineId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	return v
-}
-
 func flattenBigQueryDatasetCreationTime(v interface{}, d *schema.ResourceData, config *Config) interface{} {
 	// Handles the string fixed64 format
 	if strVal, ok := v.(string); ok {
@@ -1081,13 +1020,6 @@ func expandBigQueryDatasetAccess(v interface{}, d TerraformResourceData, config
 			transformed["dataset"] = transformedDataset
 		}
 
-		transformedRoutine, err := expandBigQueryDatasetAccessRoutine(original["routine"], d, config)
-		if err != nil {
-			return nil, err
-		} else if val := reflect.ValueOf(transformedRoutine); val.IsValid() && !isEmptyValue(val) {
-			transformed["routine"] = transformedRoutine
-		}
-
 		req = append(req, transformed)
 	}
 	return req, nil
@@ -1222,51 +1154,6 @@ func expandBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d TerraformRes
 	return v, nil
 }
 
-func expandBigQueryDatasetAccessRoutine(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	l := v.([]interface{})
-	if len(l) == 0 || l[0] == nil {
-		return nil, nil
-	}
-	raw := l[0]
-	original := raw.(map[string]interface{})
-	transformed := make(map[string]interface{})
-
-	transformedDatasetId, err := expandBigQueryDatasetAccessRoutineDatasetId(original["dataset_id"], d, config)
-	if err != nil {
-		return nil, err
-	} else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
-		transformed["datasetId"] = transformedDatasetId
-	}
-
-	transformedProjectId, err := expandBigQueryDatasetAccessRoutineProjectId(original["project_id"], d, config)
-	if err != nil {
-		return nil, err
-	} else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
-		transformed["projectId"] = transformedProjectId
-	}
-
-	transformedRoutineId, err := expandBigQueryDatasetAccessRoutineRoutineId(original["routine_id"], d, config)
-	if err != nil {
-		return nil, err
-	} else if val := reflect.ValueOf(transformedRoutineId); val.IsValid() && !isEmptyValue(val) {
-		transformed["routineId"] = transformedRoutineId
-	}
-
-	return transformed, nil
-}
-
-func expandBigQueryDatasetAccessRoutineDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
-}
-
-func expandBigQueryDatasetAccessRoutineProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
-}
-
-func expandBigQueryDatasetAccessRoutineRoutineId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
-}
-
 func expandBigQueryDatasetDatasetReference(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
 	transformed := make(map[string]interface{})
 	transformedDatasetId, err := expandBigQueryDatasetDatasetReferenceDatasetId(d.Get("dataset_id"), d, config)
diff --git a/google/resource_bigquery_dataset_access.go b/google/resource_bigquery_dataset_access.go
index cd156d3903..df9c610c6e 100644
--- a/google/resource_bigquery_dataset_access.go
+++ b/google/resource_bigquery_dataset_access.go
@@ -202,7 +202,7 @@ but additional target types may be added in the future. Possible values: VIEWS`,
 					},
 				},
 			},
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"domain": {
 			Type: schema.TypeString,
@@ -211,7 +211,7 @@ but additional target types may be added in the future. Possible values: VIEWS`,
 			DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
 			Description: `A domain to grant access to. Any users signed in with the
 domain specified will be granted the specified access`,
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"group_by_email": {
 			Type: schema.TypeString,
@@ -219,7 +219,7 @@ domain specified will be granted the specified access`,
 			ForceNew: true,
 			DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
 			Description: `An email address of a Google Group to grant access to.`,
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"iam_member": {
 			Type: schema.TypeString,
@@ -228,7 +228,7 @@ domain specified will be granted the specified access`,
 			DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
 			Description: `Some other type of member that appears in the IAM Policy
 but isn't a user, group, domain, or special group. For example: 'allUsers'`,
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"role": {
 			Type: schema.TypeString,
@@ -242,42 +242,6 @@ swapped by the API to their basic counterparts, and will show a
 diff post-create. See
 [official docs](https://cloud.google.com/bigquery/docs/access-control).`,
 		},
-		"routine": {
-			Type: schema.TypeList,
-			Optional: true,
-			ForceNew: true,
-			Description: `A routine from a different dataset to grant access to. Queries
-executed against that routine will have read access to tables in
-this dataset. The role field is not required when this field is
-set. If that routine is updated by any user, access to the routine
-needs to be granted again via an update operation.`,
-			MaxItems: 1,
-			Elem: &schema.Resource{
-				Schema: map[string]*schema.Schema{
-					"dataset_id": {
-						Type: schema.TypeString,
-						Required: true,
-						ForceNew: true,
-						Description: `The ID of the dataset containing this table.`,
-					},
-					"project_id": {
-						Type: schema.TypeString,
-						Required: true,
-						ForceNew: true,
-						Description: `The ID of the project containing this table.`,
-					},
-					"routine_id": {
-						Type: schema.TypeString,
-						Required: true,
-						ForceNew: true,
-						Description: `The ID of the routine. The ID must contain only letters (a-z,
-A-Z), numbers (0-9), or underscores (_). The maximum length
-is 256 characters.`,
-					},
-				},
-			},
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
-		},
 		"special_group": {
 			Type: schema.TypeString,
 			Optional: true,
@@ -296,7 +260,7 @@ is 256 characters.`,
 
 
 * 'allAuthenticatedUsers': All authenticated BigQuery users.`,
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"user_by_email": {
 			Type: schema.TypeString,
@@ -305,7 +269,7 @@ is 256 characters.`,
 			DiffSuppressFunc: resourceBigQueryDatasetAccessIamMemberDiffSuppress,
 			Description: `An email address of a user to grant access to. For example:
fred@example.com`,
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"view": {
 			Type: schema.TypeList,
@@ -341,7 +305,7 @@ is 1,024 characters.`,
 				},
 			},
 		},
-			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset", "routine"},
+			ExactlyOneOf: []string{"user_by_email", "group_by_email", "domain", "special_group", "iam_member", "view", "dataset"},
 		},
 		"api_updated_member": {
 			Type: schema.TypeBool,
@@ -421,12 +385,6 @@ func resourceBigQueryDatasetAccessCreate(d *schema.ResourceData, meta interface{
 	} else if v, ok := d.GetOkExists("dataset"); !isEmptyValue(reflect.ValueOf(datasetProp)) && (ok || !reflect.DeepEqual(v, datasetProp)) {
 		obj["dataset"] = datasetProp
 	}
-	routineProp, err := expandNestedBigQueryDatasetAccessRoutine(d.Get("routine"), d, config)
-	if err != nil {
-		return err
-	} else if v, ok := d.GetOkExists("routine"); !isEmptyValue(reflect.ValueOf(routineProp)) && (ok || !reflect.DeepEqual(v, routineProp)) {
-		obj["routine"] = routineProp
-	}
 
 	lockName, err := replaceVars(d, config, "{{dataset_id}}")
 	if err != nil {
@@ -574,9 +532,6 @@ func resourceBigQueryDatasetAccessRead(d *schema.ResourceData, meta interface{})
 	if err := d.Set("dataset", flattenNestedBigQueryDatasetAccessDataset(res["dataset"], d, config)); err != nil {
 		return fmt.Errorf("Error reading DatasetAccess: %s", err)
 	}
-	if err := d.Set("routine", flattenNestedBigQueryDatasetAccessRoutine(res["routine"], d, config)); err != nil {
-		return fmt.Errorf("Error reading DatasetAccess: %s", err)
-	}
 
 	return nil
 }
@@ -725,35 +680,6 @@ func flattenNestedBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d *sche
 	return v
 }
 
-func flattenNestedBigQueryDatasetAccessRoutine(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	if v == nil {
-		return nil
-	}
-	original := v.(map[string]interface{})
-	if len(original) == 0 {
-		return nil
-	}
-	transformed := make(map[string]interface{})
-	transformed["dataset_id"] =
-		flattenNestedBigQueryDatasetAccessRoutineDatasetId(original["datasetId"], d, config)
-	transformed["project_id"] =
-		flattenNestedBigQueryDatasetAccessRoutineProjectId(original["projectId"], d, config)
-	transformed["routine_id"] =
-		flattenNestedBigQueryDatasetAccessRoutineRoutineId(original["routineId"], d, config)
-	return []interface{}{transformed}
-}
-func flattenNestedBigQueryDatasetAccessRoutineDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	return v
-}
-
-func flattenNestedBigQueryDatasetAccessRoutineProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	return v
-}
-
-func flattenNestedBigQueryDatasetAccessRoutineRoutineId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-	return v
-}
-
 func expandNestedBigQueryDatasetAccessDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
 	return v, nil
 }
@@ -898,51 +824,6 @@ func expandNestedBigQueryDatasetAccessDatasetTargetTypes(v interface{}, d Terraf
 	return v, nil
 }
 
-func expandNestedBigQueryDatasetAccessRoutine(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	l := v.([]interface{})
-	if len(l) == 0 || l[0] == nil {
-		return nil, nil
-	}
-	raw := l[0]
-	original := raw.(map[string]interface{})
-	transformed := make(map[string]interface{})
-
-	transformedDatasetId, err := expandNestedBigQueryDatasetAccessRoutineDatasetId(original["dataset_id"], d, config)
-	if err != nil {
-		return nil, err
-	} else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
-		transformed["datasetId"] = transformedDatasetId
-	}
-
-	transformedProjectId, err := expandNestedBigQueryDatasetAccessRoutineProjectId(original["project_id"], d, config)
-	if err != nil {
-		return nil, err
-	} else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
-		transformed["projectId"] = transformedProjectId
-	}
-
-	transformedRoutineId, err := expandNestedBigQueryDatasetAccessRoutineRoutineId(original["routine_id"], d, config)
-	if err != nil {
-		return nil, err
-	} else if val := reflect.ValueOf(transformedRoutineId); val.IsValid() && !isEmptyValue(val) {
-		transformed["routineId"] = transformedRoutineId
-	}
-
-	return transformed, nil
-}
-
-func expandNestedBigQueryDatasetAccessRoutineDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
-}
-
-func expandNestedBigQueryDatasetAccessRoutineProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
-}
-
-func expandNestedBigQueryDatasetAccessRoutineRoutineId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-	return v, nil
-}
-
 func flattenNestedBigQueryDatasetAccess(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) {
 	var v interface{}
 	var ok bool
@@ -1010,11 +891,6 @@ func resourceBigQueryDatasetAccessFindNestedObjectInList(d *schema.ResourceData,
 		return -1, nil, err
 	}
 	expectedFlattenedDataset := flattenNestedBigQueryDatasetAccessDataset(expectedDataset, d, meta.(*Config))
-	expectedRoutine, err := expandNestedBigQueryDatasetAccessRoutine(d.Get("routine"), d, meta.(*Config))
-	if err != nil {
-		return -1, nil, err
-	}
-	expectedFlattenedRoutine := flattenNestedBigQueryDatasetAccessRoutine(expectedRoutine, d, meta.(*Config))
 
 	// Search list for this resource.
 	for idx, itemRaw := range items {
@@ -1071,12 +947,6 @@ func resourceBigQueryDatasetAccessFindNestedObjectInList(d *schema.ResourceData,
 			log.Printf("[DEBUG] Skipping item with dataset= %#v, looking for %#v)", itemDataset, expectedFlattenedDataset)
 			continue
 		}
-		itemRoutine := flattenNestedBigQueryDatasetAccessRoutine(item["routine"], d, meta.(*Config))
-		// isEmptyValue check so that if one is nil and the other is "", that's considered a match
-		if !(isEmptyValue(reflect.ValueOf(itemRoutine)) && isEmptyValue(reflect.ValueOf(expectedFlattenedRoutine))) && !reflect.DeepEqual(itemRoutine, expectedFlattenedRoutine) {
-			log.Printf("[DEBUG] Skipping item with routine= %#v, looking for %#v)", itemRoutine, expectedFlattenedRoutine)
-			continue
-		}
 		log.Printf("[DEBUG] Found item for resource %q: %#v)", d.Id(), item)
 		return idx, item, nil
 	}
diff --git a/google/resource_bigquery_dataset_access_test.go b/google/resource_bigquery_dataset_access_test.go
index bb1b5a8b83..0aa0010cb2 100644
--- a/google/resource_bigquery_dataset_access_test.go
+++ b/google/resource_bigquery_dataset_access_test.go
@@ -100,42 +100,6 @@ func TestAccBigQueryDatasetAccess_authorizedDataset(t *testing.T) {
 	})
 }
 
-func TestAccBigQueryDatasetAccess_authorizedRoutine(t *testing.T) {
-	// Multiple fine-grained resources
-	skipIfVcr(t)
-	t.Parallel()
-
-	context := map[string]interface{}{
-		"public_dataset": fmt.Sprintf("tf_test_public_dataset_%s", randString(t, 10)),
-		"public_routine": fmt.Sprintf("tf_test_public_routine_%s", randString(t, 10)),
-		"private_dataset": fmt.Sprintf("tf_test_private_dataset_%s", randString(t, 10)),
-	}
-
-	expected := map[string]interface{}{
-		"routine": map[string]interface{}{
-			"projectId": getTestProjectFromEnv(),
-			"datasetId": context["public_dataset"],
-			"routineId": context["public_routine"],
-		},
-	}
-
-	vcrTest(t, resource.TestCase{
-		PreCheck: func() { testAccPreCheck(t) },
-		Providers: testAccProviders,
-		Steps: []resource.TestStep{
-			{
-				Config: testAccBigQueryDatasetAccess_authorizedRoutine(context),
-				Check: testAccCheckBigQueryDatasetAccessPresent(t, "google_bigquery_dataset.private", expected),
-			},
-			{
-				// Destroy step instead of CheckDestroy so we can check the access is removed without deleting the dataset
-				Config: testAccBigQueryDatasetAccess_destroy(context["private_dataset"].(string), "private"),
-				Check: testAccCheckBigQueryDatasetAccessAbsent(t, "google_bigquery_dataset.private", expected),
-			},
-		},
-	})
-}
-
 func TestAccBigQueryDatasetAccess_multiple(t *testing.T) {
 	// Multiple fine-grained resources
 	skipIfVcr(t)
@@ -394,47 +358,6 @@ resource "google_bigquery_dataset" "public" {
 `, datasetID, datasetID2)
 }
 
-func testAccBigQueryDatasetAccess_authorizedRoutine(context map[string]interface{}) string {
-	return Nprintf(`
-resource "google_bigquery_dataset" "public" {
-  dataset_id = "%{public_dataset}"
-  description = "This dataset is public"
-}
-
-resource "google_bigquery_routine" "public" {
-  dataset_id = google_bigquery_dataset.public.dataset_id
-  routine_id = "%{public_routine}"
-  routine_type = "TABLE_VALUED_FUNCTION"
-  language = "SQL"
-  definition_body = <<-EOS
-    SELECT 1 + value AS value
-  EOS
-  arguments {
-    name = "value"
-    argument_kind = "FIXED_TYPE"
-    data_type = jsonencode({ "typeKind" = "INT64" })
-  }
-  return_table_type = jsonencode({ "columns" = [
-    { "name" = "value", "type" = { "typeKind" = "INT64" } },
-  ] })
-}
-
-resource "google_bigquery_dataset" "private" {
-  dataset_id = "%{private_dataset}"
-  description = "This dataset is private"
-}
-
-resource "google_bigquery_dataset_access" "authorized_routine" {
-  dataset_id = google_bigquery_dataset.private.dataset_id
-  routine {
-    project_id = google_bigquery_routine.public.project
-    dataset_id = google_bigquery_routine.public.dataset_id
-    routine_id = google_bigquery_routine.public.routine_id
-  }
-}
-`, context)
-}
-
 func testAccBigQueryDatasetAccess_multiple(datasetID string) string {
 	return fmt.Sprintf(`
 resource "google_bigquery_dataset_access" "access" {
diff --git a/google/resource_bigquery_dataset_generated_test.go b/google/resource_bigquery_dataset_generated_test.go
index 776a258c5f..8759a3b606 100644
--- a/google/resource_bigquery_dataset_generated_test.go
+++ b/google/resource_bigquery_dataset_generated_test.go
@@ -218,74 +218,6 @@ resource "google_service_account" "bqowner" {
 `, context)
 }
 
-func TestAccBigQueryDataset_bigqueryDatasetAuthorizedRoutineExample(t *testing.T) {
-	t.Parallel()
-
-	context := map[string]interface{}{
-		"service_account": getTestServiceAccountFromEnv(t),
-		"random_suffix": randString(t, 10),
-	}
-
-	vcrTest(t, resource.TestCase{
-		PreCheck: func() { testAccPreCheck(t) },
-		Providers: testAccProviders,
-		CheckDestroy: testAccCheckBigQueryDatasetDestroyProducer(t),
-		Steps: []resource.TestStep{
-			{
-				Config: testAccBigQueryDataset_bigqueryDatasetAuthorizedRoutineExample(context),
-			},
-			{
-				ResourceName: "google_bigquery_dataset.private",
-				ImportState: true,
-				ImportStateVerify: true,
-			},
-		},
-	})
-}
-
-func testAccBigQueryDataset_bigqueryDatasetAuthorizedRoutineExample(context map[string]interface{}) string {
-	return Nprintf(`
-resource "google_bigquery_dataset" "public" {
-  dataset_id = "tf_test_public_dataset%{random_suffix}"
-  description = "This dataset is public"
-}
-
-resource "google_bigquery_routine" "public" {
-  dataset_id = google_bigquery_dataset.public.dataset_id
-  routine_id = "tf_test_public_routine%{random_suffix}"
-  routine_type = "TABLE_VALUED_FUNCTION"
-  language = "SQL"
-  definition_body = <<-EOS
-    SELECT 1 + value AS value
-  EOS
-  arguments {
-    name = "value"
-    argument_kind = "FIXED_TYPE"
-    data_type = jsonencode({ "typeKind" = "INT64" })
-  }
-  return_table_type = jsonencode({ "columns" = [
-    { "name" = "value", "type" = { "typeKind" = "INT64" } },
-  ] })
-}
-
-resource "google_bigquery_dataset" "private" {
-  dataset_id = "tf_test_private_dataset%{random_suffix}"
-  description = "This dataset is private"
-  access {
-    role = "OWNER"
-    user_by_email = "%{service_account}"
-  }
-  access {
-    routine {
-      project_id = google_bigquery_routine.public.project
-      dataset_id = google_bigquery_routine.public.dataset_id
-      routine_id = google_bigquery_routine.public.routine_id
-    }
-  }
-}
-`, context)
-}
-
 func testAccCheckBigQueryDatasetDestroyProducer(t *testing.T) func(s *terraform.State) error {
 	return func(s *terraform.State) error {
 		for name, rs := range s.RootModule().Resources {
diff --git a/website/docs/r/bigquery_dataset.html.markdown b/website/docs/r/bigquery_dataset.html.markdown
index c231bcb609..d66c83d01b 100644
--- a/website/docs/r/bigquery_dataset.html.markdown
+++ b/website/docs/r/bigquery_dataset.html.markdown
@@ -161,49 +161,6 @@ resource "google_service_account" "bqowner" {
   account_id = "bqowner"
 }
 ```
-## Example Usage - Bigquery Dataset Authorized Routine
-
-
-```hcl
-resource "google_bigquery_dataset" "public" {
-  dataset_id = "public_dataset"
-  description = "This dataset is public"
-}
-
-resource "google_bigquery_routine" "public" {
-  dataset_id = google_bigquery_dataset.public.dataset_id
-  routine_id = "public_routine"
-  routine_type = "TABLE_VALUED_FUNCTION"
-  language = "SQL"
-  definition_body = <<-EOS
-    SELECT 1 + value AS value
-  EOS
-  arguments {
-    name = "value"
-    argument_kind = "FIXED_TYPE"
-    data_type = jsonencode({ "typeKind" = "INT64" })
-  }
-  return_table_type = jsonencode({ "columns" = [
-    { "name" = "value", "type" = { "typeKind" = "INT64" } },
-  ] })
-}
-
-resource "google_bigquery_dataset" "private" {
-  dataset_id = "private_dataset"
-  description = "This dataset is private"
-  access {
-    role = "OWNER"
-    user_by_email = "emailAddress:my@service-account.com"
-  }
-  access {
-    routine {
-      project_id = google_bigquery_routine.public.project
-      dataset_id = google_bigquery_routine.public.dataset_id
-      routine_id = google_bigquery_routine.public.routine_id
-    }
-  }
-}
-```
 
 ## Argument Reference
 
@@ -352,15 +309,6 @@ destroying the resource will fail if tables are present.
   Grants all resources of particular types in a particular dataset read access to the current dataset.
   Structure is [documented below](#nested_dataset).
 
-* `routine` -
-  (Optional)
-  A routine from a different dataset to grant access to. Queries
-  executed against that routine will have read access to tables in
-  this dataset. The role field is not required when this field is
-  set. If that routine is updated by any user, access to the routine
-  needs to be granted again via an update operation.
-  Structure is [documented below](#nested_routine).
-
 The `view` block supports:
 
@@ -401,22 +349,6 @@ destroying the resource will fail if tables are present.
   (Required)
   The ID of the project containing this table.
 
-The `routine` block supports:
-
-* `dataset_id` -
-  (Required)
-  The ID of the dataset containing this table.
-
-* `project_id` -
-  (Required)
-  The ID of the project containing this table.
-
-* `routine_id` -
-  (Required)
-  The ID of the routine. The ID must contain only letters (a-z,
-  A-Z), numbers (0-9), or underscores (_). The maximum length
-  is 256 characters.
-
 The `default_encryption_configuration` block supports:
 
 * `kms_key_name` -
diff --git a/website/docs/r/bigquery_dataset_access.html.markdown b/website/docs/r/bigquery_dataset_access.html.markdown
index ac262e19af..f14de7b273 100644
--- a/website/docs/r/bigquery_dataset_access.html.markdown
+++ b/website/docs/r/bigquery_dataset_access.html.markdown
@@ -112,47 +112,6 @@ resource "google_bigquery_dataset" "public" {
   dataset_id = "public"
 }
 ```
-## Example Usage - Bigquery Dataset Access Authorized Routine
-
-
-```hcl
-resource "google_bigquery_dataset" "public" {
-  dataset_id = "public_dataset"
-  description = "This dataset is public"
-}
-
-resource "google_bigquery_routine" "public" {
-  dataset_id = google_bigquery_dataset.public.dataset_id
-  routine_id = "public_routine"
-  routine_type = "TABLE_VALUED_FUNCTION"
-  language = "SQL"
-  definition_body = <<-EOS
-    SELECT 1 + value AS value
-  EOS
-  arguments {
-    name = "value"
-    argument_kind = "FIXED_TYPE"
-    data_type = jsonencode({ "typeKind" = "INT64" })
-  }
-  return_table_type = jsonencode({ "columns" = [
-    { "name" = "value", "type" = { "typeKind" = "INT64" } },
-  ] })
-}
-
-resource "google_bigquery_dataset" "private" {
-  dataset_id = "private_dataset"
-  description = "This dataset is private"
-}
-
-resource "google_bigquery_dataset_access" "authorized_routine" {
-  dataset_id = google_bigquery_dataset.private.dataset_id
-  routine {
-    project_id = google_bigquery_routine.public.project
-    dataset_id = google_bigquery_routine.public.dataset_id
-    routine_id = google_bigquery_routine.public.routine_id
-  }
-}
-```
 
 ## Argument Reference
 
@@ -223,15 +182,6 @@ The following arguments are supported:
   Grants all resources of particular types in a particular dataset read access to the current dataset.
   Structure is [documented below](#nested_dataset).
 
-* `routine` -
-  (Optional)
-  A routine from a different dataset to grant access to. Queries
-  executed against that routine will have read access to tables in
-  this dataset. The role field is not required when this field is
-  set. If that routine is updated by any user, access to the routine
-  needs to be granted again via an update operation.
-  Structure is [documented below](#nested_routine).
-
 * `project` - (Optional) The ID of the project in which the resource belongs.
     If it is not provided, the provider project is used.
 
@@ -275,22 +225,6 @@ The following arguments are supported:
   (Required)
   The ID of the project containing this table.
 
-The `routine` block supports:
-
-* `dataset_id` -
-  (Required)
-  The ID of the dataset containing this table.
-
-* `project_id` -
-  (Required)
-  The ID of the project containing this table.
-
-* `routine_id` -
-  (Required)
-  The ID of the routine. The ID must contain only letters (a-z,
-  A-Z), numbers (0-9), or underscores (_). The maximum length
-  is 256 characters.
-
 ## Attributes Reference
 
 In addition to the arguments listed above, the following computed attributes are exported:
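After this removal, cross-dataset authorization on `google_bigquery_dataset_access` is limited to the member types left in `ExactlyOneOf` (`view`, `dataset`, and the IAM-style members). A minimal, illustrative sketch of the authorized-dataset form that remains available is shown below; the dataset names are placeholders, and the block shape follows the `dataset`/`target_types` schema visible in the hunk context above.

```hcl
# Dataset whose tables are being shared.
resource "google_bigquery_dataset" "private" {
  dataset_id = "private_dataset"
}

# Dataset whose views should be allowed to read the private dataset.
resource "google_bigquery_dataset" "public" {
  dataset_id = "public_dataset"
}

# Authorized dataset: every current and future view in the public dataset
# gains read access to tables in the private dataset.
resource "google_bigquery_dataset_access" "authorized_dataset" {
  dataset_id = google_bigquery_dataset.private.dataset_id
  dataset {
    dataset {
      project_id = google_bigquery_dataset.public.project
      dataset_id = google_bigquery_dataset.public.dataset_id
    }
    target_types = ["VIEWS"]
  }
}
```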